1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for all git state in a project: tracked repositories,
/// per-buffer diffs and conflict sets, and the local/remote collaboration
/// plumbing used to mirror that state to other peers.
pub struct GitStore {
    /// Local vs. remote mode, plus the per-mode connection state.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All repositories known to the store, keyed by id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// Which worktrees each repository is associated with.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository currently treated as "active", if any.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff-loading tasks, shared so that concurrent requests for
    /// the same (buffer, kind) pair reuse one task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (open diffs, conflict set, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diff handles kept alive on behalf of remote collaborators, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Strong handles to diffs that a remote peer has opened; keeping them here
/// prevents the diffs from being dropped while the peer still needs them.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Git state tracked for a single buffer: the open diffs, the conflict set,
/// and the cached base texts used to (re)compute those diffs.
struct BufferGitState {
    // Weak handles — the diffs and conflict set are owned by their consumers
    // and may be dropped at any time.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs against arbitrary commits, keyed by the commit oid
    /// (`None` when the diff has no base content).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    // In-flight background work; replaced (and thereby cancelled) when a
    // newer recalculation/reparse supersedes it.
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders notified once the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    /// Watch flag that is `true` while a diff recalculation is in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    // Cached base texts (`None` = absent) and dirty flags describing what
    // changed since the last diff recalculation.
    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed, and their new
/// contents (`None` when there is no base text for the file).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index text changed.
    SetIndex(Option<String>),
    /// Only the HEAD text changed.
    SetHead(Option<String>),
    /// Both changed, to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Both changed, to the same value.
    SetBoth(Option<String>),
}

/// The kind of diff tracked for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer contents vs. the git index.
    Unstaged,
    /// Buffer contents vs. HEAD.
    Uncommitted,
    /// Buffer contents vs. a specific commit (`None` = no base content).
    SinceOid(Option<git::Oid>),
}
162
/// Which side of the collaboration protocol this store is on, along with the
/// connection state for each side.
enum GitStoreState {
    /// The store operates on repositories on the local disk.
    Local {
        /// Source of fresh repository ids.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is shared with downstream collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    /// The store mirrors repositories owned by an upstream (remote) project.
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this mirrored project is re-shared further downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A change to forward to downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Connection state while a local project is shared: updates queued on
/// `updates_tx` are diffed against previous snapshots and sent by `_task`.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// A checkpoint of every repository in the store, keyed by each repository's
/// working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}

/// The git status of a single path within a repository, plus optional
/// added/deleted line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summarizes this entry for the status sum-tree: the path itself plus the
    /// aggregated git status summary.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed (and therefore ordered/deduplicated) by repo path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Identifier of a repository within the store (newtype over `u64`).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: which merge heads conflict on which paths,
/// and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Commit-graph data for a single commit, possibly still being fetched.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}

/// An immutable snapshot of a repository's observable state, cheap to clone
/// and diffable against earlier snapshots for downstream updates.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Per-path git statuses, ordered by path in a sum-tree.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    /// Incremented on every status scan; used to order snapshots.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
}
297
/// Identifier for a queued git job.
type JobId = u64;

/// Metadata about a running git job, used to surface progress in the UI.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Background handler that fetches commit data for oids sent on
/// `commit_data_request`.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the commit-data handler.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Initial commit-graph payload for one (source, order) combination, plus the
/// task that is still streaming it in.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index into `commit_data` by commit oid.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of graph data handed to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
329
/// A single git repository tracked by the [`GitStore`], combining the latest
/// [`RepositorySnapshot`] with job scheduling and per-repository UI state.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    /// Buffer used for editing the commit message, once opened.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Sender for queued [`GitJob`]s (receiver not visible in this chunk).
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Jobs currently running, keyed by their id.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    /// Next job id to hand out.
    job_id: JobId,
    /// Askpass delegates registered for in-flight remote operations.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Lazily-resolved backend state (local backend or remote client).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    /// Initial commit-graph data per requested (source, order) pair.
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    /// Cached per-commit graph data.
    commit_data: HashMap<Oid, CommitDataState>,
}
349
/// Lets callers read [`RepositorySnapshot`] fields directly on a
/// [`Repository`] (e.g. `repo.branch`, `repo.id`).
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
357
/// Backend state for a repository on the local disk: the opened git backend
/// plus the shell environment resolved for its working directory.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    pub environment: Arc<HashMap<String, String>>,
}
364
impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`, first resolving the working
    /// directory's shell environment and using its `PATH` to locate a system
    /// `git` binary.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        // Resolve the directory environment; on failure, log and fall back to
        // an empty environment rather than aborting the open.
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        // Open the backend off the main thread: binary lookup and repository
        // open both touch the filesystem.
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer `git` found via the repository's own PATH, then
                    // fall back to the default search path.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
407
/// Backend state for a repository mirrored from an upstream project.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// The resolved backend for a repository: a local on-disk backend or a
/// client connection to the upstream project that owns it.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events while loading commit-graph data.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of loaded commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
426
/// Events emitted by a [`Repository`] when its observable state changes.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// Commit-graph progress for a specific (source, order) request.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the [`GitStore`] itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// A hunk stage/unstage write to the index failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
450
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of work to run once the repository's backend state is available.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): presumably used to deduplicate/coalesce queued jobs with
    // an equal key — the consuming side is not in this chunk; confirm there.
    key: Option<GitJobKey>,
}

/// Classes of git jobs, used as deduplication keys.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
467
468impl GitStore {
    /// Creates a store that operates on local, on-disk repositories.
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                // Repository ids start at 1; 0 is never handed out.
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }
488
    /// Creates a store that mirrors repositories owned by an upstream
    /// (remote) project over `upstream_client`.
    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: u64,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                downstream: None,
            },
            cx,
        )
    }
507
    /// Shared constructor: wires up subscriptions to the worktree and buffer
    /// stores (and to the trusted-worktrees global, when present) and builds
    /// an empty store in the given mode.
    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        let mut _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        // The trusted-worktrees global is optional; only subscribe if set.
        if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
            _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
        }

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            worktree_ids: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }
536
    /// Registers all git-related RPC handlers on the client so that remote
    /// peers can drive this store's entities over the wire.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
584
    /// Whether this store operates on local, on-disk repositories (as opposed
    /// to mirroring an upstream project).
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
588 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
589 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
590 let id = repo.read(cx).id;
591 if self.active_repo_id != Some(id) {
592 self.active_repo_id = Some(id);
593 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
594 }
595 }
596 }
597
    /// Begins sharing repository state with a downstream client.
    ///
    /// In remote mode, the current snapshots are sent immediately and the
    /// client is recorded for future forwarding. In local mode, a background
    /// task is spawned that diffs successive snapshots against the last one
    /// sent and streams incremental updates to the client.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send a full initial update for every repository, split into
                // protocol-sized chunks.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the channel with the current snapshot of every
                // repository so the client receives initial updates first.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        // Forward updates on a background thread; `snapshots`
                        // remembers the last state sent per repository so each
                        // update can be sent as a delta.
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // First time we see this repo:
                                            // send a full initial update.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The forwarding loop ended (channel closed or send
                        // failed): clear the downstream client.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
678
679 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
680 match &mut self.state {
681 GitStoreState::Local {
682 downstream: downstream_client,
683 ..
684 } => {
685 downstream_client.take();
686 }
687 GitStoreState::Remote {
688 downstream: downstream_client,
689 ..
690 } => {
691 downstream_client.take();
692 }
693 }
694 self.shared_diffs.clear();
695 }
696
    /// Drops the diff handles that were kept alive on behalf of `peer_id`.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
700
701 pub fn active_repository(&self) -> Option<Entity<Repository>> {
702 self.active_repo_id
703 .as_ref()
704 .map(|id| self.repositories[id].clone())
705 }
706
    /// Returns a diff of the buffer's contents against the git index, creating
    /// it if necessary. Concurrent calls for the same buffer share a single
    /// loading task via `loading_diffs`.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: the diff already exists; wait for any in-progress
        // recalculation before handing it out.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Slow path: load the staged (index) text and build the diff, sharing
        // the task with any concurrent request for the same buffer.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        // Unwrap the shared task's Arc'd error into a plain anyhow::Error.
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
761
    /// Returns a diff of the buffer's contents against the blob at `oid`
    /// (no base content when `oid` is `None`), creating it if necessary.
    ///
    /// The new diff's secondary diff is set to the buffer's unstaged diff,
    /// which is opened (or reused) as part of this call.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff for this oid already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load is already in flight: piggyback on it.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository, unless there is
                    // no base commit.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Use the unstaged diff as the secondary diff so staged
                    // hunks can be distinguished.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Register the finished diff and cache its base text.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
862
    /// Returns a diff of the buffer's contents against HEAD, creating it if
    /// necessary. Concurrent calls for the same buffer share a single loading
    /// task via `loading_diffs`.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff already exists; wait for any in-progress
        // recalculation before handing it out.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Slow path: load the committed (HEAD) text and build the diff.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        // Unwrap the shared task's Arc'd error into a plain anyhow::Error.
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
915
    /// Shared tail of `open_unstaged_diff`/`open_uncommitted_diff`: given the
    /// freshly-loaded base texts (or a load error), registers a new
    /// [`BufferDiff`] in the buffer's [`BufferGitState`], kicks off the first
    /// recalculation, and resolves once that recalculation completes.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // On a load error, clear the loading entry so a later call can retry.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries the unstaged diff
                        // as its secondary diff; create one if none is open.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Apply the new base texts and wait for the resulting
                // recalculation before resolving.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
990
991 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
992 let diff_state = self.diffs.get(&buffer_id)?;
993 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
994 }
995
996 pub fn get_uncommitted_diff(
997 &self,
998 buffer_id: BufferId,
999 cx: &App,
1000 ) -> Option<Entity<BufferDiff>> {
1001 let diff_state = self.diffs.get(&buffer_id)?;
1002 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1003 }
1004
1005 pub fn get_diff_since_oid(
1006 &self,
1007 buffer_id: BufferId,
1008 oid: Option<git::Oid>,
1009 cx: &App,
1010 ) -> Option<Entity<BufferDiff>> {
1011 let diff_state = self.diffs.get(&buffer_id)?;
1012 diff_state.read(cx).oid_diff(oid)
1013 }
1014
1015 pub fn open_conflict_set(
1016 &mut self,
1017 buffer: Entity<Buffer>,
1018 cx: &mut Context<Self>,
1019 ) -> Entity<ConflictSet> {
1020 log::debug!("open conflict set");
1021 let buffer_id = buffer.read(cx).remote_id();
1022
1023 if let Some(git_state) = self.diffs.get(&buffer_id)
1024 && let Some(conflict_set) = git_state
1025 .read(cx)
1026 .conflict_set
1027 .as_ref()
1028 .and_then(|weak| weak.upgrade())
1029 {
1030 let conflict_set = conflict_set;
1031 let buffer_snapshot = buffer.read(cx).text_snapshot();
1032
1033 git_state.update(cx, |state, cx| {
1034 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1035 });
1036
1037 return conflict_set;
1038 }
1039
1040 let is_unmerged = self
1041 .repository_and_path_for_buffer_id(buffer_id, cx)
1042 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1043 let git_store = cx.weak_entity();
1044 let buffer_git_state = self
1045 .diffs
1046 .entry(buffer_id)
1047 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1048 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1049
1050 self._subscriptions
1051 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1052 cx.emit(GitStoreEvent::ConflictsUpdated);
1053 }));
1054
1055 buffer_git_state.update(cx, |state, cx| {
1056 state.conflict_set = Some(conflict_set.downgrade());
1057 let buffer_snapshot = buffer.read(cx).text_snapshot();
1058 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1059 });
1060
1061 conflict_set
1062 }
1063
1064 pub fn project_path_git_status(
1065 &self,
1066 project_path: &ProjectPath,
1067 cx: &App,
1068 ) -> Option<FileStatus> {
1069 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1070 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1071 }
1072
    /// Creates a checkpoint of every repository in the store.
    ///
    /// Returns a [`GitStoreCheckpoint`] that maps each repository's work
    /// directory path to its individual checkpoint. Fails if any single
    /// repository checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` resolves to a nested result; the `?` inside
                // the map flattens it so `try_join_all` sees one error level.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Pairs up 1:1 with `work_directory_abs_paths` because both
                // vectors were pushed in lockstep above.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1093
    /// Restores each repository to the state captured in `checkpoint`.
    ///
    /// Checkpoint entries whose work directory no longer corresponds to a
    /// tracked repository are silently skipped.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // `restore` resolves to a nested result; `?` flattens it.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1119
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// If the right-hand checkpoint lacks an entry for any work directory
    /// present on the left, the comparison short-circuits to `false`.
    /// Entries that exist only on the right, or whose repository is no
    /// longer tracked, are not compared.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    // `compare` resolves to a nested result; `?` flattens it.
                    tasks.push(async move { compare.await? });
                }
            } else {
                // A repository captured on the left is missing on the right,
                // so the checkpoints cannot be equal.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1158
    /// Blames a buffer.
    ///
    /// When `version` is provided, the buffer's content as of that version
    /// is blamed; otherwise the current content is used. For local
    /// repositories the blame runs against the git backend directly; for
    /// remote repositories the request is forwarded upstream via RPC.
    ///
    /// Returns an error if the buffer does not belong to a git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot everything we need from the buffer before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle so the spawned task doesn't keep the
        // repository entity alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1205
1206 pub fn file_history(
1207 &self,
1208 repo: &Entity<Repository>,
1209 path: RepoPath,
1210 cx: &mut App,
1211 ) -> Task<Result<git::repository::FileHistory>> {
1212 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1213
1214 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1215 }
1216
1217 pub fn file_history_paginated(
1218 &self,
1219 repo: &Entity<Repository>,
1220 path: RepoPath,
1221 skip: usize,
1222 limit: Option<usize>,
1223 cx: &mut App,
1224 ) -> Task<Result<git::repository::FileHistory>> {
1225 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1226
1227 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1228 }
1229
    /// Builds a permalink URL to the given line range of a buffer on its
    /// git hosting provider.
    ///
    /// If the buffer is not inside a git repository but is a Rust source
    /// file, a permalink into the Cargo registry source is attempted
    /// instead. Local repositories resolve the remote URL and HEAD SHA via
    /// the git backend; remote repositories forward the request upstream.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        let branch = repo.read(cx).branch.clone();
        // Prefer the current branch's upstream remote; fall back to "origin".
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1314
1315 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1316 match &self.state {
1317 GitStoreState::Local {
1318 downstream: downstream_client,
1319 ..
1320 } => downstream_client
1321 .as_ref()
1322 .map(|state| (state.client.clone(), state.project_id)),
1323 GitStoreState::Remote {
1324 downstream: downstream_client,
1325 ..
1326 } => downstream_client.clone(),
1327 }
1328 }
1329
1330 fn upstream_client(&self) -> Option<AnyProtoClient> {
1331 match &self.state {
1332 GitStoreState::Local { .. } => None,
1333 GitStoreState::Remote {
1334 upstream_client, ..
1335 } => Some(upstream_client.clone()),
1336 }
1337 }
1338
    /// Handles worktree store events for a *local* git store: propagates
    /// entry changes to the affected repositories, creates/updates
    /// repositories when a worktree's git repos change, and removes
    /// repositories whose last worktree went away.
    ///
    /// No-op for remote stores (the early return below).
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Figure out (asynchronously) which repositories the
                    // changed paths belong to, then notify each one.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible (e.g. internal) worktrees don't contribute
                // repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Drop the removed worktree from every repository's worktree
                // set; collect repositories that are now worktree-less.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining one (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to a change in a single repository: refreshes conflict
    /// tracking for every open buffer belonging to that repository, then
    /// re-emits the event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse markers when the unmerged state
                        // actually flipped, avoiding needless buffer scans.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                // The weak conflict-set handle may have been dropped; that
                // failure is fine to ignore.
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1484
    /// Forwards a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1488
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
    ///
    /// For each update: an existing repository matching the old or new work
    /// directory is either rescanned (when the work dir still exists) or
    /// detached from the worktree; otherwise a fresh local repository is
    /// created, subscribed to, and registered. Repositories that lose their
    /// last worktree are removed and the removal is forwarded downstream.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match an existing repository by either its old or new work
            // directory, so renames are recognized as the same repo.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repo still exists: associate it with this worktree,
                    // record its (possibly renamed) work dir, and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // The repo's work dir is gone from this worktree; if no
                    // other worktree references it, mark it for removal.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(_repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A brand-new repository: allocate an id and create it.
                let original_repo_abs_path: Arc<Path> =
                    git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository ever added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1597
    /// Propagates worktree trust changes to the git backends of all
    /// repositories associated with the affected worktrees.
    ///
    /// Only applies to local stores; remote stores ignore trust events.
    fn on_trusted_worktrees_event(
        &mut self,
        _: Entity<TrustedWorktreesStore>,
        event: &TrustedWorktreesEvent,
        cx: &mut Context<Self>,
    ) {
        if !matches!(self.state, GitStoreState::Local { .. }) {
            return;
        }

        let (is_trusted, event_paths) = match event {
            TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
            TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
        };

        for (repo_id, worktree_ids) in &self.worktree_ids {
            // A repository is affected if any of its worktrees appears in
            // the event's path set.
            if worktree_ids
                .iter()
                .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
            {
                if let Some(repo) = self.repositories.get(repo_id) {
                    let repository_state = repo.read(cx).repository_state.clone();
                    // The backend lives behind an async state handle; only
                    // local backends can have their trust flag set.
                    cx.background_spawn(async move {
                        if let Ok(RepositoryState::Local(state)) = repository_state.await {
                            state.backend.set_trusted(is_trusted);
                        }
                    })
                    .detach();
                }
            }
        }
    }
1630
    /// Keeps per-buffer git state in sync with buffer lifecycle events:
    /// language changes, shared-buffer closure, buffer drops, and file
    /// renames that may move a buffer into (or within) a git repository.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Re-run diff syntax highlighting when the buffer's language
                // changes.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Forget all diff state for the dropped buffer, including any
                // diffs shared with collaborators.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Inner async block so a single `.log_err()` covers
                        // every fallible step.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1703
    /// Kicks off diff recalculation and conflict-marker reparsing for each
    /// of the given buffers that has git state.
    ///
    /// Returns a future that resolves once all recalculations and reparses
    /// have completed. Buffers without tracked git state are skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` returns `None` when there is
                    // nothing in flight; only push an actual future.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1729
    /// When hunks are staged or unstaged in a buffer diff, writes the new
    /// index text to the repository.
    ///
    /// On failure the diff's pending hunks are cleared and an
    /// `IndexWriteError` event is emitted so the UI can surface the error.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Count each staging operation so stale async results can be
                // detected by the index-write job.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Outer `Ok` means the job ran; inner `Err` is the
                        // actual index-write failure we need to report.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1769
1770 fn local_worktree_git_repos_changed(
1771 &mut self,
1772 worktree: Entity<Worktree>,
1773 changed_repos: &UpdatedGitRepositoriesSet,
1774 cx: &mut Context<Self>,
1775 ) {
1776 log::debug!("local worktree repos changed");
1777 debug_assert!(worktree.read(cx).is_local());
1778
1779 for repository in self.repositories.values() {
1780 repository.update(cx, |repository, cx| {
1781 let repo_abs_path = &repository.work_directory_abs_path;
1782 if changed_repos.iter().any(|update| {
1783 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1784 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1785 }) {
1786 repository.reload_buffer_diff_bases(cx);
1787 }
1788 });
1789 }
1790 }
1791
    /// All repositories tracked by this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1795
1796 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1797 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1798 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1799 Some(status.status)
1800 }
1801
1802 pub fn repository_and_path_for_buffer_id(
1803 &self,
1804 buffer_id: BufferId,
1805 cx: &App,
1806 ) -> Option<(Entity<Repository>, RepoPath)> {
1807 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1808 let project_path = buffer.read(cx).project_path(cx)?;
1809 self.repository_and_path_for_project_path(&project_path, cx)
1810 }
1811
    /// Resolves a project path to its containing repository and the path
    /// relative to that repository's work directory.
    ///
    /// When repositories are nested, the repository with the greatest work
    /// directory path wins — an enclosing repo's path is a strict prefix of
    /// the nested one's and compares smaller, so this selects the innermost
    /// repository containing the file.
    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }
1826
1827 pub fn git_init(
1828 &self,
1829 path: Arc<Path>,
1830 fallback_branch_name: String,
1831 cx: &App,
1832 ) -> Task<Result<()>> {
1833 match &self.state {
1834 GitStoreState::Local { fs, .. } => {
1835 let fs = fs.clone();
1836 cx.background_executor()
1837 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1838 }
1839 GitStoreState::Remote {
1840 upstream_client,
1841 upstream_project_id: project_id,
1842 ..
1843 } => {
1844 let client = upstream_client.clone();
1845 let project_id = *project_id;
1846 cx.background_executor().spawn(async move {
1847 client
1848 .request(proto::GitInit {
1849 project_id: project_id,
1850 abs_path: path.to_string_lossy().into_owned(),
1851 fallback_branch_name,
1852 })
1853 .await?;
1854 Ok(())
1855 })
1856 }
1857 }
1858 }
1859
1860 pub fn git_clone(
1861 &self,
1862 repo: String,
1863 path: impl Into<Arc<std::path::Path>>,
1864 cx: &App,
1865 ) -> Task<Result<()>> {
1866 let path = path.into();
1867 match &self.state {
1868 GitStoreState::Local { fs, .. } => {
1869 let fs = fs.clone();
1870 cx.background_executor()
1871 .spawn(async move { fs.git_clone(&repo, &path).await })
1872 }
1873 GitStoreState::Remote {
1874 upstream_client,
1875 upstream_project_id,
1876 ..
1877 } => {
1878 if upstream_client.is_via_collab() {
1879 return Task::ready(Err(anyhow!(
1880 "Git Clone isn't supported for project guests"
1881 )));
1882 }
1883 let request = upstream_client.request(proto::GitClone {
1884 project_id: *upstream_project_id,
1885 abs_path: path.to_string_lossy().into_owned(),
1886 remote_repo: repo,
1887 });
1888
1889 cx.background_spawn(async move {
1890 let result = request.await?;
1891
1892 match result.success {
1893 true => Ok(()),
1894 false => Err(anyhow!("Git Clone failed")),
1895 }
1896 })
1897 }
1898 }
1899 }
1900
    /// RPC handler: applies an `UpdateRepository` message from upstream,
    /// creating the remote repository entity on first sight, and re-sends
    /// the update to any downstream client.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // The subscription is created inside `or_insert_with` (only for
            // brand-new repos) but registered afterwards, since
            // `this._subscriptions` can't be borrowed inside the closure.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Forward the update downstream with our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1956
    /// RPC handler: removes a repository in response to an upstream
    /// `RemoveRepository` message, forwards the removal downstream, and
    /// clears the active repository if it was the one removed.
    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        // NOTE(review): the result of this `update` call is discarded (no `?`),
        // so a released entity silently no-ops — confirm that's intended.
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        });
        Ok(())
    }
1978
1979 async fn handle_git_init(
1980 this: Entity<Self>,
1981 envelope: TypedEnvelope<proto::GitInit>,
1982 cx: AsyncApp,
1983 ) -> Result<proto::Ack> {
1984 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1985 let name = envelope.payload.fallback_branch_name;
1986 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1987 .await?;
1988
1989 Ok(proto::Ack {})
1990 }
1991
1992 async fn handle_git_clone(
1993 this: Entity<Self>,
1994 envelope: TypedEnvelope<proto::GitClone>,
1995 cx: AsyncApp,
1996 ) -> Result<proto::GitCloneResponse> {
1997 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1998 let repo_name = envelope.payload.remote_repo;
1999 let result = cx
2000 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2001 .await;
2002
2003 Ok(proto::GitCloneResponse {
2004 success: result.is_ok(),
2005 })
2006 }
2007
    /// RPC handler: fetches from a remote on behalf of a downstream client,
    /// proxying any askpass (credential) prompts back to the requester.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Credential prompts are relayed to the requesting client.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2037
    /// RPC handler: pushes a branch to a remote on behalf of a downstream
    /// client, proxying any askpass (credential) prompts back to the
    /// requester.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `options()` decodes the proto enum; the outer `map` preserves the
        // absent case so `None` means "no push options".
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2085
    /// RPC handler: pulls from a remote on behalf of a downstream client,
    /// proxying any askpass (credential) prompts back to the requester.
    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `branch_name` is optional in the proto message.
        let branch_name = envelope.payload.branch_name.map(|name| name.into());
        let remote_name = envelope.payload.remote_name.into();
        let rebase = envelope.payload.rebase;

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }
2117
2118 async fn handle_stage(
2119 this: Entity<Self>,
2120 envelope: TypedEnvelope<proto::Stage>,
2121 mut cx: AsyncApp,
2122 ) -> Result<proto::Ack> {
2123 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2124 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2125
2126 let entries = envelope
2127 .payload
2128 .paths
2129 .into_iter()
2130 .map(|path| RepoPath::new(&path))
2131 .collect::<Result<Vec<_>>>()?;
2132
2133 repository_handle
2134 .update(&mut cx, |repository_handle, cx| {
2135 repository_handle.stage_entries(entries, cx)
2136 })
2137 .await?;
2138 Ok(proto::Ack {})
2139 }
2140
2141 async fn handle_unstage(
2142 this: Entity<Self>,
2143 envelope: TypedEnvelope<proto::Unstage>,
2144 mut cx: AsyncApp,
2145 ) -> Result<proto::Ack> {
2146 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2147 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2148
2149 let entries = envelope
2150 .payload
2151 .paths
2152 .into_iter()
2153 .map(|path| RepoPath::new(&path))
2154 .collect::<Result<Vec<_>>>()?;
2155
2156 repository_handle
2157 .update(&mut cx, |repository_handle, cx| {
2158 repository_handle.unstage_entries(entries, cx)
2159 })
2160 .await?;
2161
2162 Ok(proto::Ack {})
2163 }
2164
2165 async fn handle_stash(
2166 this: Entity<Self>,
2167 envelope: TypedEnvelope<proto::Stash>,
2168 mut cx: AsyncApp,
2169 ) -> Result<proto::Ack> {
2170 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2171 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2172
2173 let entries = envelope
2174 .payload
2175 .paths
2176 .into_iter()
2177 .map(|path| RepoPath::new(&path))
2178 .collect::<Result<Vec<_>>>()?;
2179
2180 repository_handle
2181 .update(&mut cx, |repository_handle, cx| {
2182 repository_handle.stash_entries(entries, cx)
2183 })
2184 .await?;
2185
2186 Ok(proto::Ack {})
2187 }
2188
2189 async fn handle_stash_pop(
2190 this: Entity<Self>,
2191 envelope: TypedEnvelope<proto::StashPop>,
2192 mut cx: AsyncApp,
2193 ) -> Result<proto::Ack> {
2194 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2195 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2196 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2197
2198 repository_handle
2199 .update(&mut cx, |repository_handle, cx| {
2200 repository_handle.stash_pop(stash_index, cx)
2201 })
2202 .await?;
2203
2204 Ok(proto::Ack {})
2205 }
2206
2207 async fn handle_stash_apply(
2208 this: Entity<Self>,
2209 envelope: TypedEnvelope<proto::StashApply>,
2210 mut cx: AsyncApp,
2211 ) -> Result<proto::Ack> {
2212 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2213 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2214 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2215
2216 repository_handle
2217 .update(&mut cx, |repository_handle, cx| {
2218 repository_handle.stash_apply(stash_index, cx)
2219 })
2220 .await?;
2221
2222 Ok(proto::Ack {})
2223 }
2224
2225 async fn handle_stash_drop(
2226 this: Entity<Self>,
2227 envelope: TypedEnvelope<proto::StashDrop>,
2228 mut cx: AsyncApp,
2229 ) -> Result<proto::Ack> {
2230 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2231 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2232 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2233
2234 repository_handle
2235 .update(&mut cx, |repository_handle, cx| {
2236 repository_handle.stash_drop(stash_index, cx)
2237 })
2238 .await??;
2239
2240 Ok(proto::Ack {})
2241 }
2242
2243 async fn handle_set_index_text(
2244 this: Entity<Self>,
2245 envelope: TypedEnvelope<proto::SetIndexText>,
2246 mut cx: AsyncApp,
2247 ) -> Result<proto::Ack> {
2248 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2249 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2250 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2251
2252 repository_handle
2253 .update(&mut cx, |repository_handle, cx| {
2254 repository_handle.spawn_set_index_text_job(
2255 repo_path,
2256 envelope.payload.text,
2257 None,
2258 cx,
2259 )
2260 })
2261 .await??;
2262 Ok(proto::Ack {})
2263 }
2264
2265 async fn handle_run_hook(
2266 this: Entity<Self>,
2267 envelope: TypedEnvelope<proto::RunGitHook>,
2268 mut cx: AsyncApp,
2269 ) -> Result<proto::Ack> {
2270 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2271 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2272 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2273 repository_handle
2274 .update(&mut cx, |repository_handle, cx| {
2275 repository_handle.run_hook(hook, cx)
2276 })
2277 .await??;
2278 Ok(proto::Ack {})
2279 }
2280
2281 async fn handle_commit(
2282 this: Entity<Self>,
2283 envelope: TypedEnvelope<proto::Commit>,
2284 mut cx: AsyncApp,
2285 ) -> Result<proto::Ack> {
2286 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2287 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2288 let askpass_id = envelope.payload.askpass_id;
2289
2290 let askpass = make_remote_delegate(
2291 this,
2292 envelope.payload.project_id,
2293 repository_id,
2294 askpass_id,
2295 &mut cx,
2296 );
2297
2298 let message = SharedString::from(envelope.payload.message);
2299 let name = envelope.payload.name.map(SharedString::from);
2300 let email = envelope.payload.email.map(SharedString::from);
2301 let options = envelope.payload.options.unwrap_or_default();
2302
2303 repository_handle
2304 .update(&mut cx, |repository_handle, cx| {
2305 repository_handle.commit(
2306 message,
2307 name.zip(email),
2308 CommitOptions {
2309 amend: options.amend,
2310 signoff: options.signoff,
2311 },
2312 askpass,
2313 cx,
2314 )
2315 })
2316 .await??;
2317 Ok(proto::Ack {})
2318 }
2319
2320 async fn handle_get_remotes(
2321 this: Entity<Self>,
2322 envelope: TypedEnvelope<proto::GetRemotes>,
2323 mut cx: AsyncApp,
2324 ) -> Result<proto::GetRemotesResponse> {
2325 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2326 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2327
2328 let branch_name = envelope.payload.branch_name;
2329 let is_push = envelope.payload.is_push;
2330
2331 let remotes = repository_handle
2332 .update(&mut cx, |repository_handle, _| {
2333 repository_handle.get_remotes(branch_name, is_push)
2334 })
2335 .await??;
2336
2337 Ok(proto::GetRemotesResponse {
2338 remotes: remotes
2339 .into_iter()
2340 .map(|remotes| proto::get_remotes_response::Remote {
2341 name: remotes.name.to_string(),
2342 })
2343 .collect::<Vec<_>>(),
2344 })
2345 }
2346
2347 async fn handle_get_worktrees(
2348 this: Entity<Self>,
2349 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2350 mut cx: AsyncApp,
2351 ) -> Result<proto::GitWorktreesResponse> {
2352 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2353 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2354
2355 let worktrees = repository_handle
2356 .update(&mut cx, |repository_handle, _| {
2357 repository_handle.worktrees()
2358 })
2359 .await??;
2360
2361 Ok(proto::GitWorktreesResponse {
2362 worktrees: worktrees
2363 .into_iter()
2364 .map(|worktree| worktree_to_proto(&worktree))
2365 .collect::<Vec<_>>(),
2366 })
2367 }
2368
2369 async fn handle_create_worktree(
2370 this: Entity<Self>,
2371 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2372 mut cx: AsyncApp,
2373 ) -> Result<proto::Ack> {
2374 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2375 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2376 let directory = PathBuf::from(envelope.payload.directory);
2377 let name = envelope.payload.name;
2378 let commit = envelope.payload.commit;
2379
2380 repository_handle
2381 .update(&mut cx, |repository_handle, _| {
2382 repository_handle.create_worktree(name, directory, commit)
2383 })
2384 .await??;
2385
2386 Ok(proto::Ack {})
2387 }
2388
2389 async fn handle_remove_worktree(
2390 this: Entity<Self>,
2391 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2392 mut cx: AsyncApp,
2393 ) -> Result<proto::Ack> {
2394 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2395 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2396 let path = PathBuf::from(envelope.payload.path);
2397 let force = envelope.payload.force;
2398
2399 repository_handle
2400 .update(&mut cx, |repository_handle, _| {
2401 repository_handle.remove_worktree(path, force)
2402 })
2403 .await??;
2404
2405 Ok(proto::Ack {})
2406 }
2407
2408 async fn handle_rename_worktree(
2409 this: Entity<Self>,
2410 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2411 mut cx: AsyncApp,
2412 ) -> Result<proto::Ack> {
2413 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2414 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2415 let old_path = PathBuf::from(envelope.payload.old_path);
2416 let new_path = PathBuf::from(envelope.payload.new_path);
2417
2418 repository_handle
2419 .update(&mut cx, |repository_handle, _| {
2420 repository_handle.rename_worktree(old_path, new_path)
2421 })
2422 .await??;
2423
2424 Ok(proto::Ack {})
2425 }
2426
2427 async fn handle_get_branches(
2428 this: Entity<Self>,
2429 envelope: TypedEnvelope<proto::GitGetBranches>,
2430 mut cx: AsyncApp,
2431 ) -> Result<proto::GitBranchesResponse> {
2432 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2433 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2434
2435 let branches = repository_handle
2436 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2437 .await??;
2438
2439 Ok(proto::GitBranchesResponse {
2440 branches: branches
2441 .into_iter()
2442 .map(|branch| branch_to_proto(&branch))
2443 .collect::<Vec<_>>(),
2444 })
2445 }
2446 async fn handle_get_default_branch(
2447 this: Entity<Self>,
2448 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2449 mut cx: AsyncApp,
2450 ) -> Result<proto::GetDefaultBranchResponse> {
2451 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2452 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2453
2454 let branch = repository_handle
2455 .update(&mut cx, |repository_handle, _| {
2456 repository_handle.default_branch(false)
2457 })
2458 .await??
2459 .map(Into::into);
2460
2461 Ok(proto::GetDefaultBranchResponse { branch })
2462 }
2463 async fn handle_create_branch(
2464 this: Entity<Self>,
2465 envelope: TypedEnvelope<proto::GitCreateBranch>,
2466 mut cx: AsyncApp,
2467 ) -> Result<proto::Ack> {
2468 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2469 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2470 let branch_name = envelope.payload.branch_name;
2471
2472 repository_handle
2473 .update(&mut cx, |repository_handle, _| {
2474 repository_handle.create_branch(branch_name, None)
2475 })
2476 .await??;
2477
2478 Ok(proto::Ack {})
2479 }
2480
2481 async fn handle_change_branch(
2482 this: Entity<Self>,
2483 envelope: TypedEnvelope<proto::GitChangeBranch>,
2484 mut cx: AsyncApp,
2485 ) -> Result<proto::Ack> {
2486 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2487 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2488 let branch_name = envelope.payload.branch_name;
2489
2490 repository_handle
2491 .update(&mut cx, |repository_handle, _| {
2492 repository_handle.change_branch(branch_name)
2493 })
2494 .await??;
2495
2496 Ok(proto::Ack {})
2497 }
2498
2499 async fn handle_rename_branch(
2500 this: Entity<Self>,
2501 envelope: TypedEnvelope<proto::GitRenameBranch>,
2502 mut cx: AsyncApp,
2503 ) -> Result<proto::Ack> {
2504 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2505 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2506 let branch = envelope.payload.branch;
2507 let new_name = envelope.payload.new_name;
2508
2509 repository_handle
2510 .update(&mut cx, |repository_handle, _| {
2511 repository_handle.rename_branch(branch, new_name)
2512 })
2513 .await??;
2514
2515 Ok(proto::Ack {})
2516 }
2517
2518 async fn handle_create_remote(
2519 this: Entity<Self>,
2520 envelope: TypedEnvelope<proto::GitCreateRemote>,
2521 mut cx: AsyncApp,
2522 ) -> Result<proto::Ack> {
2523 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2524 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2525 let remote_name = envelope.payload.remote_name;
2526 let remote_url = envelope.payload.remote_url;
2527
2528 repository_handle
2529 .update(&mut cx, |repository_handle, _| {
2530 repository_handle.create_remote(remote_name, remote_url)
2531 })
2532 .await??;
2533
2534 Ok(proto::Ack {})
2535 }
2536
2537 async fn handle_delete_branch(
2538 this: Entity<Self>,
2539 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2540 mut cx: AsyncApp,
2541 ) -> Result<proto::Ack> {
2542 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2543 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2544 let branch_name = envelope.payload.branch_name;
2545
2546 repository_handle
2547 .update(&mut cx, |repository_handle, _| {
2548 repository_handle.delete_branch(branch_name)
2549 })
2550 .await??;
2551
2552 Ok(proto::Ack {})
2553 }
2554
2555 async fn handle_remove_remote(
2556 this: Entity<Self>,
2557 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2558 mut cx: AsyncApp,
2559 ) -> Result<proto::Ack> {
2560 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2561 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2562 let remote_name = envelope.payload.remote_name;
2563
2564 repository_handle
2565 .update(&mut cx, |repository_handle, _| {
2566 repository_handle.remove_remote(remote_name)
2567 })
2568 .await??;
2569
2570 Ok(proto::Ack {})
2571 }
2572
2573 async fn handle_show(
2574 this: Entity<Self>,
2575 envelope: TypedEnvelope<proto::GitShow>,
2576 mut cx: AsyncApp,
2577 ) -> Result<proto::GitCommitDetails> {
2578 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2579 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2580
2581 let commit = repository_handle
2582 .update(&mut cx, |repository_handle, _| {
2583 repository_handle.show(envelope.payload.commit)
2584 })
2585 .await??;
2586 Ok(proto::GitCommitDetails {
2587 sha: commit.sha.into(),
2588 message: commit.message.into(),
2589 commit_timestamp: commit.commit_timestamp,
2590 author_email: commit.author_email.into(),
2591 author_name: commit.author_name.into(),
2592 })
2593 }
2594
2595 async fn handle_load_commit_diff(
2596 this: Entity<Self>,
2597 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2598 mut cx: AsyncApp,
2599 ) -> Result<proto::LoadCommitDiffResponse> {
2600 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2601 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2602
2603 let commit_diff = repository_handle
2604 .update(&mut cx, |repository_handle, _| {
2605 repository_handle.load_commit_diff(envelope.payload.commit)
2606 })
2607 .await??;
2608 Ok(proto::LoadCommitDiffResponse {
2609 files: commit_diff
2610 .files
2611 .into_iter()
2612 .map(|file| proto::CommitFile {
2613 path: file.path.to_proto(),
2614 old_text: file.old_text,
2615 new_text: file.new_text,
2616 is_binary: file.is_binary,
2617 })
2618 .collect(),
2619 })
2620 }
2621
2622 async fn handle_file_history(
2623 this: Entity<Self>,
2624 envelope: TypedEnvelope<proto::GitFileHistory>,
2625 mut cx: AsyncApp,
2626 ) -> Result<proto::GitFileHistoryResponse> {
2627 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2628 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2629 let path = RepoPath::from_proto(&envelope.payload.path)?;
2630 let skip = envelope.payload.skip as usize;
2631 let limit = envelope.payload.limit.map(|l| l as usize);
2632
2633 let file_history = repository_handle
2634 .update(&mut cx, |repository_handle, _| {
2635 repository_handle.file_history_paginated(path, skip, limit)
2636 })
2637 .await??;
2638
2639 Ok(proto::GitFileHistoryResponse {
2640 entries: file_history
2641 .entries
2642 .into_iter()
2643 .map(|entry| proto::FileHistoryEntry {
2644 sha: entry.sha.to_string(),
2645 subject: entry.subject.to_string(),
2646 message: entry.message.to_string(),
2647 commit_timestamp: entry.commit_timestamp,
2648 author_name: entry.author_name.to_string(),
2649 author_email: entry.author_email.to_string(),
2650 })
2651 .collect(),
2652 path: file_history.path.to_proto(),
2653 })
2654 }
2655
2656 async fn handle_reset(
2657 this: Entity<Self>,
2658 envelope: TypedEnvelope<proto::GitReset>,
2659 mut cx: AsyncApp,
2660 ) -> Result<proto::Ack> {
2661 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2662 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2663
2664 let mode = match envelope.payload.mode() {
2665 git_reset::ResetMode::Soft => ResetMode::Soft,
2666 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2667 };
2668
2669 repository_handle
2670 .update(&mut cx, |repository_handle, cx| {
2671 repository_handle.reset(envelope.payload.commit, mode, cx)
2672 })
2673 .await??;
2674 Ok(proto::Ack {})
2675 }
2676
2677 async fn handle_checkout_files(
2678 this: Entity<Self>,
2679 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2680 mut cx: AsyncApp,
2681 ) -> Result<proto::Ack> {
2682 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2683 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2684 let paths = envelope
2685 .payload
2686 .paths
2687 .iter()
2688 .map(|s| RepoPath::from_proto(s))
2689 .collect::<Result<Vec<_>>>()?;
2690
2691 repository_handle
2692 .update(&mut cx, |repository_handle, cx| {
2693 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2694 })
2695 .await?;
2696 Ok(proto::Ack {})
2697 }
2698
    /// Handles `proto::OpenCommitMessageBuffer`: opens the repository's
    /// commit-message buffer, replicates it to the requesting peer, and
    /// returns the buffer's remote id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Share the buffer with the peer that asked for it, preferring the
        // original sender when the request was forwarded through the host.
        // Replication happens in the background; failures are only logged.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2729
    /// Handles `proto::AskPassRequest`: forwards a git credential prompt to
    /// the askpass delegate registered under the request's `askpass_id`, and
    /// returns the (decrypted) answer to the requester.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map while prompting so the mutex guard
        // is never held across the `.await` below.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Re-register the delegate so a follow-up prompt for the same git
        // operation can find it again.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2758
2759 async fn handle_check_for_pushed_commits(
2760 this: Entity<Self>,
2761 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2762 mut cx: AsyncApp,
2763 ) -> Result<proto::CheckForPushedCommitsResponse> {
2764 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2765 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2766
2767 let branches = repository_handle
2768 .update(&mut cx, |repository_handle, _| {
2769 repository_handle.check_for_pushed_commits()
2770 })
2771 .await??;
2772 Ok(proto::CheckForPushedCommitsResponse {
2773 pushed_to: branches
2774 .into_iter()
2775 .map(|commit| commit.to_string())
2776 .collect(),
2777 })
2778 }
2779
2780 async fn handle_git_diff(
2781 this: Entity<Self>,
2782 envelope: TypedEnvelope<proto::GitDiff>,
2783 mut cx: AsyncApp,
2784 ) -> Result<proto::GitDiffResponse> {
2785 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2786 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2787 let diff_type = match envelope.payload.diff_type() {
2788 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2789 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2790 proto::git_diff::DiffType::MergeBase => {
2791 let base_ref = envelope
2792 .payload
2793 .merge_base_ref
2794 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2795 DiffType::MergeBase {
2796 base_ref: base_ref.into(),
2797 }
2798 }
2799 };
2800
2801 let mut diff = repository_handle
2802 .update(&mut cx, |repository_handle, cx| {
2803 repository_handle.diff(diff_type, cx)
2804 })
2805 .await??;
2806 const ONE_MB: usize = 1_000_000;
2807 if diff.len() > ONE_MB {
2808 diff = diff.chars().take(ONE_MB).collect()
2809 }
2810
2811 Ok(proto::GitDiffResponse { diff })
2812 }
2813
2814 async fn handle_tree_diff(
2815 this: Entity<Self>,
2816 request: TypedEnvelope<proto::GetTreeDiff>,
2817 mut cx: AsyncApp,
2818 ) -> Result<proto::GetTreeDiffResponse> {
2819 let repository_id = RepositoryId(request.payload.repository_id);
2820 let diff_type = if request.payload.is_merge {
2821 DiffTreeType::MergeBase {
2822 base: request.payload.base.into(),
2823 head: request.payload.head.into(),
2824 }
2825 } else {
2826 DiffTreeType::Since {
2827 base: request.payload.base.into(),
2828 head: request.payload.head.into(),
2829 }
2830 };
2831
2832 let diff = this
2833 .update(&mut cx, |this, cx| {
2834 let repository = this.repositories().get(&repository_id)?;
2835 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2836 })
2837 .context("missing repository")?
2838 .await??;
2839
2840 Ok(proto::GetTreeDiffResponse {
2841 entries: diff
2842 .entries
2843 .into_iter()
2844 .map(|(path, status)| proto::TreeDiffStatus {
2845 path: path.as_ref().to_proto(),
2846 status: match status {
2847 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2848 TreeDiffStatus::Modified { .. } => {
2849 proto::tree_diff_status::Status::Modified.into()
2850 }
2851 TreeDiffStatus::Deleted { .. } => {
2852 proto::tree_diff_status::Status::Deleted.into()
2853 }
2854 },
2855 oid: match status {
2856 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2857 Some(old.to_string())
2858 }
2859 TreeDiffStatus::Added => None,
2860 },
2861 })
2862 .collect(),
2863 })
2864 }
2865
2866 async fn handle_get_blob_content(
2867 this: Entity<Self>,
2868 request: TypedEnvelope<proto::GetBlobContent>,
2869 mut cx: AsyncApp,
2870 ) -> Result<proto::GetBlobContentResponse> {
2871 let oid = git::Oid::from_str(&request.payload.oid)?;
2872 let repository_id = RepositoryId(request.payload.repository_id);
2873 let content = this
2874 .update(&mut cx, |this, cx| {
2875 let repository = this.repositories().get(&repository_id)?;
2876 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2877 })
2878 .context("missing repository")?
2879 .await?;
2880 Ok(proto::GetBlobContentResponse { content })
2881 }
2882
2883 async fn handle_open_unstaged_diff(
2884 this: Entity<Self>,
2885 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2886 mut cx: AsyncApp,
2887 ) -> Result<proto::OpenUnstagedDiffResponse> {
2888 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2889 let diff = this
2890 .update(&mut cx, |this, cx| {
2891 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2892 Some(this.open_unstaged_diff(buffer, cx))
2893 })
2894 .context("missing buffer")?
2895 .await?;
2896 this.update(&mut cx, |this, _| {
2897 let shared_diffs = this
2898 .shared_diffs
2899 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2900 .or_default();
2901 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2902 });
2903 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
2904 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2905 }
2906
    /// Handles `proto::OpenUncommittedDiff`: opens the uncommitted diff for a
    /// buffer, records it as shared with the requesting peer, and sends back
    /// the committed (HEAD) and staged (index) base texts, deduplicated via
    /// the `mode` flag when the index matches HEAD.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff as shared with the requesting peer (preferring the
        // original sender when the request was forwarded through the host).
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary (unstaged) diff holds the index base text, when
            // one exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Index and HEAD share the same base buffer: skip
                        // sending the staged text twice.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // HEAD text exists but nothing is in the index.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base (e.g. the file is new): only the index
                // text, if any, can be sent.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2967
2968 async fn handle_update_diff_bases(
2969 this: Entity<Self>,
2970 request: TypedEnvelope<proto::UpdateDiffBases>,
2971 mut cx: AsyncApp,
2972 ) -> Result<()> {
2973 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2974 this.update(&mut cx, |this, cx| {
2975 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2976 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2977 {
2978 let buffer = buffer.read(cx).text_snapshot();
2979 diff_state.update(cx, |diff_state, cx| {
2980 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2981 })
2982 }
2983 });
2984 Ok(())
2985 }
2986
2987 async fn handle_blame_buffer(
2988 this: Entity<Self>,
2989 envelope: TypedEnvelope<proto::BlameBuffer>,
2990 mut cx: AsyncApp,
2991 ) -> Result<proto::BlameBufferResponse> {
2992 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2993 let version = deserialize_version(&envelope.payload.version);
2994 let buffer = this.read_with(&cx, |this, cx| {
2995 this.buffer_store.read(cx).get_existing(buffer_id)
2996 })?;
2997 buffer
2998 .update(&mut cx, |buffer, _| {
2999 buffer.wait_for_version(version.clone())
3000 })
3001 .await?;
3002 let blame = this
3003 .update(&mut cx, |this, cx| {
3004 this.blame_buffer(&buffer, Some(version), cx)
3005 })
3006 .await?;
3007 Ok(serialize_blame_buffer_response(blame))
3008 }
3009
3010 async fn handle_get_permalink_to_line(
3011 this: Entity<Self>,
3012 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3013 mut cx: AsyncApp,
3014 ) -> Result<proto::GetPermalinkToLineResponse> {
3015 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3016 // let version = deserialize_version(&envelope.payload.version);
3017 let selection = {
3018 let proto_selection = envelope
3019 .payload
3020 .selection
3021 .context("no selection to get permalink for defined")?;
3022 proto_selection.start as u32..proto_selection.end as u32
3023 };
3024 let buffer = this.read_with(&cx, |this, cx| {
3025 this.buffer_store.read(cx).get_existing(buffer_id)
3026 })?;
3027 let permalink = this
3028 .update(&mut cx, |this, cx| {
3029 this.get_permalink_to_line(&buffer, selection, cx)
3030 })
3031 .await?;
3032 Ok(proto::GetPermalinkToLineResponse {
3033 permalink: permalink.to_string(),
3034 })
3035 }
3036
3037 fn repository_for_request(
3038 this: &Entity<Self>,
3039 id: RepositoryId,
3040 cx: &mut AsyncApp,
3041 ) -> Result<Entity<Repository>> {
3042 this.read_with(cx, |this, _| {
3043 this.repositories
3044 .get(&id)
3045 .context("missing repository handle")
3046 .cloned()
3047 })
3048 }
3049
3050 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3051 self.repositories
3052 .iter()
3053 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3054 .collect()
3055 }
3056
    /// Maps a batch of updated worktree entries to the repositories that
    /// contain them.
    ///
    /// Returns a background task producing, for each affected repository,
    /// the repo-relative paths of the updated entries. Each path is
    /// attributed to at most one repository — the innermost one containing
    /// it (relevant for nested repositories / submodules).
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Capture every repository's work directory up front so the
        // background work below needs no access to `self`.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize after sorting; sorted order means each repository's
        // entries form one contiguous range, found via binary search below.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // Iterate repos in reverse sorted order so that inner (deeper)
            // repositories are processed before the ones containing them.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the entry's index so duplicates across
                        // repos can be de-duplicated below.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3136}
3137
3138impl BufferGitState {
3139 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3140 Self {
3141 unstaged_diff: Default::default(),
3142 uncommitted_diff: Default::default(),
3143 oid_diffs: Default::default(),
3144 recalculate_diff_task: Default::default(),
3145 language: Default::default(),
3146 language_registry: Default::default(),
3147 recalculating_tx: postage::watch::channel_with(false).0,
3148 hunk_staging_operation_count: 0,
3149 hunk_staging_operation_count_as_of_write: 0,
3150 head_text: Default::default(),
3151 index_text: Default::default(),
3152 oid_texts: Default::default(),
3153 head_changed: Default::default(),
3154 index_changed: Default::default(),
3155 language_changed: Default::default(),
3156 conflict_updated_futures: Default::default(),
3157 conflict_set: Default::default(),
3158 reparse_conflict_markers_task: Default::default(),
3159 }
3160 }
3161
3162 #[ztracing::instrument(skip_all)]
3163 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3164 self.language = buffer.read(cx).language().cloned();
3165 self.language_changed = true;
3166 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3167 }
3168
    /// Re-parses git conflict markers in `buffer` and updates the associated
    /// conflict set.
    ///
    /// Returns a receiver that resolves once the conflict set has been
    /// updated. If there is no live conflict set, or the set currently
    /// reports no conflict, the work is skipped and the returned receiver
    /// resolves with `Canceled` (the sender is dropped).
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only re-parse when a conflict was previously detected; otherwise
        // there is nothing to diff the new parse against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            // Replacing the task cancels any in-flight re-parse.
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parsing and comparing are CPU-bound; run off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Resolve every waiter registered since the last update,
                    // not just the one for this parse.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3220
3221 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3222 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3223 }
3224
3225 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3226 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3227 }
3228
3229 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3230 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3231 }
3232
3233 fn handle_base_texts_updated(
3234 &mut self,
3235 buffer: text::BufferSnapshot,
3236 message: proto::UpdateDiffBases,
3237 cx: &mut Context<Self>,
3238 ) {
3239 use proto::update_diff_bases::Mode;
3240
3241 let Some(mode) = Mode::from_i32(message.mode) else {
3242 return;
3243 };
3244
3245 let diff_bases_change = match mode {
3246 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3247 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3248 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3249 Mode::IndexAndHead => DiffBasesChange::SetEach {
3250 index: message.staged_text,
3251 head: message.committed_text,
3252 },
3253 };
3254
3255 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3256 }
3257
3258 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3259 if *self.recalculating_tx.borrow() {
3260 let mut rx = self.recalculating_tx.subscribe();
3261 Some(async move {
3262 loop {
3263 let is_recalculating = rx.recv().await;
3264 if is_recalculating != Some(true) {
3265 break;
3266 }
3267 }
3268 })
3269 } else {
3270 None
3271 }
3272 }
3273
3274 fn diff_bases_changed(
3275 &mut self,
3276 buffer: text::BufferSnapshot,
3277 diff_bases_change: Option<DiffBasesChange>,
3278 cx: &mut Context<Self>,
3279 ) {
3280 match diff_bases_change {
3281 Some(DiffBasesChange::SetIndex(index)) => {
3282 self.index_text = index.map(|mut index| {
3283 text::LineEnding::normalize(&mut index);
3284 Arc::from(index.as_str())
3285 });
3286 self.index_changed = true;
3287 }
3288 Some(DiffBasesChange::SetHead(head)) => {
3289 self.head_text = head.map(|mut head| {
3290 text::LineEnding::normalize(&mut head);
3291 Arc::from(head.as_str())
3292 });
3293 self.head_changed = true;
3294 }
3295 Some(DiffBasesChange::SetBoth(text)) => {
3296 let text = text.map(|mut text| {
3297 text::LineEnding::normalize(&mut text);
3298 Arc::from(text.as_str())
3299 });
3300 self.head_text = text.clone();
3301 self.index_text = text;
3302 self.head_changed = true;
3303 self.index_changed = true;
3304 }
3305 Some(DiffBasesChange::SetEach { index, head }) => {
3306 self.index_text = index.map(|mut index| {
3307 text::LineEnding::normalize(&mut index);
3308 Arc::from(index.as_str())
3309 });
3310 self.index_changed = true;
3311 self.head_text = head.map(|mut head| {
3312 text::LineEnding::normalize(&mut head);
3313 Arc::from(head.as_str())
3314 });
3315 self.head_changed = true;
3316 }
3317 None => {}
3318 }
3319
3320 self.recalculate_diffs(buffer, cx)
3321 }
3322
    /// Recomputes the unstaged, uncommitted, and per-oid diffs for this
    /// buffer against the currently stored base texts.
    ///
    /// The work runs in a spawned task stored in `recalculate_diff_task`
    /// (replacing it cancels any previous run). The `recalculating_tx`
    /// watch is raised for the duration so `wait_for_recalculation` can
    /// block on completion. If new hunk-staging operations start while the
    /// task is in flight, the run aborts and leaves the pending state for a
    /// later recalculation to settle.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        // Capture everything the async task needs so it doesn't borrow self.
        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality suffices: `diff_bases_changed` aliases one Arc
        // for SetBoth, so identical bases share an allocation.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        // Snapshot the live oid diffs together with their base texts.
        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop dead oid diff entries and their cached base texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // 1. Recompute the diff against the index.
            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            // 2. Recompute the diff against HEAD, reusing the unstaged diff
            //    when the index matches HEAD.
            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            // 3. Publish the recomputed snapshots to the diff entities.
            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // 4. Recompute and publish each commit-oid diff in turn.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and lower the recalculating watch.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
3516}
3517
/// Builds an askpass delegate that forwards credential prompts to the
/// downstream (remote collaboration) client.
///
/// Each prompt is sent as an `AskPassRequest`; the response is converted
/// into an `EncryptedPassword` and handed back through `tx`. The plaintext
/// response buffer is zeroized after use as a best-effort scrub.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // Without a downstream client there is nobody to ask; the
            // dropped `tx` signals cancellation to the caller.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3547
3548impl RepositoryId {
3549 pub fn to_proto(self) -> u64 {
3550 self.0
3551 }
3552
3553 pub fn from_proto(id: u64) -> Self {
3554 RepositoryId(id)
3555 }
3556}
3557
3558impl RepositorySnapshot {
3559 fn empty(
3560 id: RepositoryId,
3561 work_directory_abs_path: Arc<Path>,
3562 original_repo_abs_path: Option<Arc<Path>>,
3563 path_style: PathStyle,
3564 ) -> Self {
3565 Self {
3566 id,
3567 statuses_by_path: Default::default(),
3568 original_repo_abs_path: original_repo_abs_path
3569 .unwrap_or_else(|| work_directory_abs_path.clone()),
3570 work_directory_abs_path,
3571 branch: None,
3572 head_commit: None,
3573 scan_id: 0,
3574 merge: Default::default(),
3575 remote_origin_url: None,
3576 remote_upstream_url: None,
3577 stash_entries: Default::default(),
3578 path_style,
3579 }
3580 }
3581
3582 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3583 proto::UpdateRepository {
3584 branch_summary: self.branch.as_ref().map(branch_to_proto),
3585 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3586 updated_statuses: self
3587 .statuses_by_path
3588 .iter()
3589 .map(|entry| entry.to_proto())
3590 .collect(),
3591 removed_statuses: Default::default(),
3592 current_merge_conflicts: self
3593 .merge
3594 .merge_heads_by_conflicted_path
3595 .iter()
3596 .map(|(repo_path, _)| repo_path.to_proto())
3597 .collect(),
3598 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3599 project_id,
3600 id: self.id.to_proto(),
3601 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3602 entry_ids: vec![self.id.to_proto()],
3603 scan_id: self.scan_id,
3604 is_last_update: true,
3605 stash_entries: self
3606 .stash_entries
3607 .entries
3608 .iter()
3609 .map(stash_to_proto)
3610 .collect(),
3611 remote_upstream_url: self.remote_upstream_url.clone(),
3612 remote_origin_url: self.remote_origin_url.clone(),
3613 original_repo_abs_path: Some(
3614 self.original_repo_abs_path.to_string_lossy().into_owned(),
3615 ),
3616 }
3617 }
3618
    /// Builds an incremental `UpdateRepository` message describing the
    /// changes from `old` to `self`.
    ///
    /// Performs a merge-walk over the two sorted status lists, emitting
    /// updated entries for paths that are new or whose status/diff-stat
    /// changed, and removals for paths no longer present.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        // Classic two-pointer merge over path-sorted sequences.
        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Path exists only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Same path in both: emit only if it changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Path exists only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
        }
    }
3694
    /// Iterates over all cached status entries, in ascending path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }
3698
    /// Returns the aggregate git status summary across all tracked paths.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }
3702
3703 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3704 self.statuses_by_path
3705 .get(&PathKey(path.as_ref().clone()), ())
3706 .cloned()
3707 }
3708
3709 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
3710 self.statuses_by_path
3711 .get(&PathKey(path.as_ref().clone()), ())
3712 .and_then(|entry| entry.diff_stat)
3713 }
3714
3715 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3716 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3717 }
3718
3719 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3720 self.path_style
3721 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3722 .unwrap()
3723 .into()
3724 }
3725
3726 #[inline]
3727 fn abs_path_to_repo_path_inner(
3728 work_directory_abs_path: &Path,
3729 abs_path: &Path,
3730 path_style: PathStyle,
3731 ) -> Option<RepoPath> {
3732 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3733 Some(RepoPath::from_rel_path(&rel_path))
3734 }
3735
    /// Whether `repo_path` was conflicted as of the most recent change to
    /// the merge heads, regardless of its current status.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }
3741
3742 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3743 let had_conflict_on_last_merge_head_change = self
3744 .merge
3745 .merge_heads_by_conflicted_path
3746 .contains_key(repo_path);
3747 let has_conflict_currently = self
3748 .status_for_path(repo_path)
3749 .is_some_and(|entry| entry.status.is_conflicted());
3750 had_conflict_on_last_merge_head_change || has_conflict_currently
3751 }
3752
3753 /// This is the name that will be displayed in the repository selector for this repository.
3754 pub fn display_name(&self) -> SharedString {
3755 self.work_directory_abs_path
3756 .file_name()
3757 .unwrap_or_default()
3758 .to_string_lossy()
3759 .to_string()
3760 .into()
3761 }
3762}
3763
3764pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3765 proto::StashEntry {
3766 oid: entry.oid.as_bytes().to_vec(),
3767 message: entry.message.clone(),
3768 branch: entry.branch.clone(),
3769 index: entry.index as u64,
3770 timestamp: entry.timestamp,
3771 }
3772}
3773
3774pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3775 Ok(StashEntry {
3776 oid: Oid::from_bytes(&entry.oid)?,
3777 message: entry.message.clone(),
3778 index: entry.index as usize,
3779 branch: entry.branch.clone(),
3780 timestamp: entry.timestamp,
3781 })
3782}
3783
3784impl MergeDetails {
3785 async fn update(
3786 &mut self,
3787 backend: &Arc<dyn GitRepository>,
3788 current_conflicted_paths: Vec<RepoPath>,
3789 ) -> Result<bool> {
3790 log::debug!("load merge details");
3791 self.message = backend.merge_message().await.map(SharedString::from);
3792 let heads = backend
3793 .revparse_batch(vec![
3794 "MERGE_HEAD".into(),
3795 "CHERRY_PICK_HEAD".into(),
3796 "REBASE_HEAD".into(),
3797 "REVERT_HEAD".into(),
3798 "APPLY_HEAD".into(),
3799 ])
3800 .await
3801 .log_err()
3802 .unwrap_or_default()
3803 .into_iter()
3804 .map(|opt| opt.map(SharedString::from))
3805 .collect::<Vec<_>>();
3806
3807 let mut conflicts_changed = false;
3808
3809 // Record the merge state for newly conflicted paths
3810 for path in ¤t_conflicted_paths {
3811 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3812 conflicts_changed = true;
3813 self.merge_heads_by_conflicted_path
3814 .insert(path.clone(), heads.clone());
3815 }
3816 }
3817
3818 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3819 self.merge_heads_by_conflicted_path
3820 .retain(|path, old_merge_heads| {
3821 let keep = current_conflicted_paths.contains(path)
3822 || (old_merge_heads == &heads
3823 && old_merge_heads.iter().any(|head| head.is_some()));
3824 if !keep {
3825 conflicts_changed = true;
3826 }
3827 keep
3828 });
3829
3830 Ok(conflicts_changed)
3831 }
3832}
3833
3834impl Repository {
3835 pub fn is_trusted(&self) -> bool {
3836 match self.repository_state.peek() {
3837 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3838 _ => false,
3839 }
3840 }
3841
    /// Returns a clone of the repository's current snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
3845
    /// Iterates over all pending git operations, in path order.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
3849
    /// Returns the aggregate summary of all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
3853
3854 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3855 self.pending_ops
3856 .get(&PathKey(path.as_ref().clone()), ())
3857 .cloned()
3858 }
3859
    /// Constructs a `Repository` backed by a local git repository on disk.
    ///
    /// The backend is opened asynchronously; `repository_state` holds a
    /// shared future that queued jobs await before running. A local git
    /// worker task is spawned to drain those jobs.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // Open the backend asynchronously; errors are stringified so the
        // shared future's output is cloneable by every awaiter.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        // Wrap the local state in the generic RepositoryState enum for
        // consumers that don't care about locality.
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Cached commit-graph data is branch-specific: clear it whenever the
        // branch changes after the initial scan.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3927
    /// Constructs a `Repository` that proxies all operations to a remote
    /// peer over the given protobuf client.
    ///
    /// Unlike `local`, the repository state is available immediately, and a
    /// remote git worker is spawned to forward queued jobs.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // State is known up front; wrap it in an already-resolved shared task
        // to match the local constructor's interface.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3965
    /// Upgrades the weak handle to the owning `GitStore`, if still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
3969
    /// Reloads the index and HEAD base texts for every open buffer that
    /// belongs to this repository, and applies any changes to the buffers'
    /// diff states.
    ///
    /// Runs as a keyed job so multiple requests coalesce. Only meaningful
    /// for local repositories; for remote ones the job logs an error and
    /// exits. Changed bases are also forwarded to the downstream client.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Collect, for each open buffer in this repository, its repo
                // path and the currently cached index/HEAD texts (present
                // only when the corresponding diff is still in use).
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Off the main thread: reload each buffer's base texts from
                // git and compute the minimal DiffBasesChange (or None when
                // nothing actually changed).
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Apply the changes on the main thread, mirroring each one to
                // the downstream client before updating local diff state.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4129
    /// Enqueues an unkeyed job on this repository's git worker.
    ///
    /// Equivalent to `send_keyed_job` with no key, so the job is never
    /// coalesced with others. `status`, when present, is shown as the
    /// job's progress message while it runs.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4142
    /// Enqueues a job on this repository's git worker and returns a receiver
    /// for its result.
    ///
    /// `key`, when present, lets the worker coalesce equivalent queued jobs.
    /// While the job runs with a `status` message, it is tracked in
    /// `active_jobs` so the UI can surface progress. If the worker has shut
    /// down, the job is silently dropped and the receiver resolves with
    /// `Canceled`.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job for progress reporting before it
                        // starts, but only when it has a status message.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Unregister unconditionally; removing an id that was
                        // never inserted is a no-op.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4192
4193 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4194 let Some(git_store) = self.git_store.upgrade() else {
4195 return;
4196 };
4197 let entity = cx.entity();
4198 git_store.update(cx, |git_store, cx| {
4199 let Some((&id, _)) = git_store
4200 .repositories
4201 .iter()
4202 .find(|(_, handle)| *handle == &entity)
4203 else {
4204 return;
4205 };
4206 git_store.active_repo_id = Some(id);
4207 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4208 });
4209 }
4210
    /// Status entries from the most recent snapshot; does not trigger a rescan.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4214
    /// Cached diff statistics for `path`, if the snapshot has them.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4218
    /// Stash entries from the most recent snapshot; does not re-query git.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4222
4223 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4224 let git_store = self.git_store.upgrade()?;
4225 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4226 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4227 let abs_path = SanitizedPath::new(&abs_path);
4228 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4229 Some(ProjectPath {
4230 worktree_id: worktree.read(cx).id(),
4231 path: relative_path,
4232 })
4233 }
4234
4235 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4236 let git_store = self.git_store.upgrade()?;
4237 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4238 let abs_path = worktree_store.absolutize(path, cx)?;
4239 self.snapshot.abs_path_to_repo_path(&abs_path)
4240 }
4241
4242 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4243 other
4244 .read(cx)
4245 .snapshot
4246 .work_directory_abs_path
4247 .starts_with(&self.snapshot.work_directory_abs_path)
4248 }
4249
4250 pub fn open_commit_buffer(
4251 &mut self,
4252 languages: Option<Arc<LanguageRegistry>>,
4253 buffer_store: Entity<BufferStore>,
4254 cx: &mut Context<Self>,
4255 ) -> Task<Result<Entity<Buffer>>> {
4256 let id = self.id;
4257 if let Some(buffer) = self.commit_message_buffer.clone() {
4258 return Task::ready(Ok(buffer));
4259 }
4260 let this = cx.weak_entity();
4261
4262 let rx = self.send_job(None, move |state, mut cx| async move {
4263 let Some(this) = this.upgrade() else {
4264 bail!("git store was dropped");
4265 };
4266 match state {
4267 RepositoryState::Local(..) => {
4268 this.update(&mut cx, |_, cx| {
4269 Self::open_local_commit_buffer(languages, buffer_store, cx)
4270 })
4271 .await
4272 }
4273 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4274 let request = client.request(proto::OpenCommitMessageBuffer {
4275 project_id: project_id.0,
4276 repository_id: id.to_proto(),
4277 });
4278 let response = request.await.context("requesting to open commit buffer")?;
4279 let buffer_id = BufferId::new(response.buffer_id)?;
4280 let buffer = buffer_store
4281 .update(&mut cx, |buffer_store, cx| {
4282 buffer_store.wait_for_remote_buffer(buffer_id, cx)
4283 })
4284 .await?;
4285 if let Some(language_registry) = languages {
4286 let git_commit_language =
4287 language_registry.language_for_name("Git Commit").await?;
4288 buffer.update(&mut cx, |buffer, cx| {
4289 buffer.set_language(Some(git_commit_language), cx);
4290 });
4291 }
4292 this.update(&mut cx, |this, _| {
4293 this.commit_message_buffer = Some(buffer.clone());
4294 });
4295 Ok(buffer)
4296 }
4297 }
4298 });
4299
4300 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
4301 }
4302
4303 fn open_local_commit_buffer(
4304 language_registry: Option<Arc<LanguageRegistry>>,
4305 buffer_store: Entity<BufferStore>,
4306 cx: &mut Context<Self>,
4307 ) -> Task<Result<Entity<Buffer>>> {
4308 cx.spawn(async move |repository, cx| {
4309 let git_commit_language = match language_registry {
4310 Some(language_registry) => {
4311 Some(language_registry.language_for_name("Git Commit").await?)
4312 }
4313 None => None,
4314 };
4315 let buffer = buffer_store
4316 .update(cx, |buffer_store, cx| {
4317 buffer_store.create_buffer(git_commit_language, false, cx)
4318 })
4319 .await?;
4320
4321 repository.update(cx, |repository, _| {
4322 repository.commit_message_buffer = Some(buffer.clone());
4323 })?;
4324 Ok(buffer)
4325 })
4326 }
4327
    /// Restores `paths` to their contents at `commit`
    /// (`git checkout <commit> -- <paths>`), tracking the affected paths as
    /// pending "reverted" operations while the job runs.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        // Status string shown in the UI while the job runs.
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                // Local repository: run the checkout directly.
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote repository: forward the request to the host.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4382
4383 pub fn reset(
4384 &mut self,
4385 commit: String,
4386 reset_mode: ResetMode,
4387 _cx: &mut App,
4388 ) -> oneshot::Receiver<Result<()>> {
4389 let id = self.id;
4390
4391 self.send_job(None, move |git_repo, _| async move {
4392 match git_repo {
4393 RepositoryState::Local(LocalRepositoryState {
4394 backend,
4395 environment,
4396 ..
4397 }) => backend.reset(commit, reset_mode, environment).await,
4398 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4399 client
4400 .request(proto::GitReset {
4401 project_id: project_id.0,
4402 repository_id: id.to_proto(),
4403 commit,
4404 mode: match reset_mode {
4405 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4406 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4407 },
4408 })
4409 .await?;
4410
4411 Ok(())
4412 }
4413 }
4414 })
4415 }
4416
4417 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4418 let id = self.id;
4419 self.send_job(None, move |git_repo, _cx| async move {
4420 match git_repo {
4421 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4422 backend.show(commit).await
4423 }
4424 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4425 let resp = client
4426 .request(proto::GitShow {
4427 project_id: project_id.0,
4428 repository_id: id.to_proto(),
4429 commit,
4430 })
4431 .await?;
4432
4433 Ok(CommitDetails {
4434 sha: resp.sha.into(),
4435 message: resp.message.into(),
4436 commit_timestamp: resp.commit_timestamp,
4437 author_email: resp.author_email.into(),
4438 author_name: resp.author_name.into(),
4439 })
4440 }
4441 }
4442 })
4443 }
4444
4445 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4446 let id = self.id;
4447 self.send_job(None, move |git_repo, cx| async move {
4448 match git_repo {
4449 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4450 backend.load_commit(commit, cx).await
4451 }
4452 RepositoryState::Remote(RemoteRepositoryState {
4453 client, project_id, ..
4454 }) => {
4455 let response = client
4456 .request(proto::LoadCommitDiff {
4457 project_id: project_id.0,
4458 repository_id: id.to_proto(),
4459 commit,
4460 })
4461 .await?;
4462 Ok(CommitDiff {
4463 files: response
4464 .files
4465 .into_iter()
4466 .map(|file| {
4467 Ok(CommitFile {
4468 path: RepoPath::from_proto(&file.path)?,
4469 old_text: file.old_text,
4470 new_text: file.new_text,
4471 is_binary: file.is_binary,
4472 })
4473 })
4474 .collect::<Result<Vec<_>>>()?,
4475 })
4476 }
4477 }
4478 })
4479 }
4480
4481 pub fn file_history(
4482 &mut self,
4483 path: RepoPath,
4484 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4485 self.file_history_paginated(path, 0, None)
4486 }
4487
4488 pub fn file_history_paginated(
4489 &mut self,
4490 path: RepoPath,
4491 skip: usize,
4492 limit: Option<usize>,
4493 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4494 let id = self.id;
4495 self.send_job(None, move |git_repo, _cx| async move {
4496 match git_repo {
4497 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4498 backend.file_history_paginated(path, skip, limit).await
4499 }
4500 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4501 let response = client
4502 .request(proto::GitFileHistory {
4503 project_id: project_id.0,
4504 repository_id: id.to_proto(),
4505 path: path.to_proto(),
4506 skip: skip as u64,
4507 limit: limit.map(|l| l as u64),
4508 })
4509 .await?;
4510 Ok(git::repository::FileHistory {
4511 entries: response
4512 .entries
4513 .into_iter()
4514 .map(|entry| git::repository::FileHistoryEntry {
4515 sha: entry.sha.into(),
4516 subject: entry.subject.into(),
4517 message: entry.message.into(),
4518 commit_timestamp: entry.commit_timestamp,
4519 author_name: entry.author_name.into(),
4520 author_email: entry.author_email.into(),
4521 })
4522 .collect(),
4523 path: RepoPath::from_proto(&response.path)?,
4524 })
4525 }
4526 }
4527 })
4528 }
4529
    /// Returns the cached initial graph data for this log source/order pair,
    /// if `graph_data` has been called for it before; does not start a fetch.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4537
    /// Returns the slice of commit-graph data currently loaded for `range`,
    /// kicking off a background fetch for this (source, order) pair on first
    /// call. Callers may see a partial (or empty) slice while
    /// `is_loading` is true; any fetch failure is surfaced via `error`.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // First request for this (source, order): spawn the fetch.
                // `local_git_graph_data` streams results into this map entry.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record the failure on the map entry so the UI can show it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to what has loaded so far. Note the start
        // is clamped to len-1, so a fully out-of-range request still yields
        // the final element rather than an empty slice.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4607
    /// Streams initial commit-graph data from a local git backend into the
    /// repository's `initial_graph_data` entry for (`log_source`,
    /// `log_order`), emitting `GraphEvent` count updates as batches arrive.
    /// Returns once the backend finishes (or fails) producing data.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        // The backend pushes batches of commit data through this channel.
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // Run the backend traversal off the main thread; dropping `request_tx`
        // at its end closes the loop below.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Append each batch to the cached graph data on the main thread.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            // Maintain the oid -> index lookup alongside the list.
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                // The entry is created by `graph_data` before this task runs;
                // a vacant entry here indicates a logic error.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Propagate any backend error after the stream has drained.
        task.await?;
        Ok(())
    }
4662
    /// Returns the cached state of `sha`'s commit data, requesting a load on
    /// a cache miss. If the background handler isn't running it is
    /// (re)started, and the caller sees `Loading` until the data arrives via
    /// the handler's result channel.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark as Loading if the request was actually queued;
                    // a send failure means the handler is shutting down.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    // Restart the handler; this request will be retried on a
                    // subsequent call once the handler is open.
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4683
    /// Starts the background handler that serves `fetch_commit_data`
    /// requests: a background task reads commit data for requested oids and
    /// a foreground task stores the results on this entity. The background
    /// task shuts itself down after 10 seconds of inactivity, at which point
    /// the handler state returns to `Closed` so it can be restarted.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Bounded results channel applies backpressure to the reader;
        // unbounded request channel so callers never block.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        // Foreground: store each loaded commit and notify observers; when the
        // results channel closes, mark the handler Closed.
        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        // Background: serve read requests until the requesters hang up or the
        // idle timeout elapses.
        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Fresh idle timer per iteration; any request resets it.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Log and keep serving; one bad oid shouldn't
                                // kill the handler.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the results channel lets the foreground task finish and
            // flip the handler state back to Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4774
    /// The project's shared `BufferStore`, if the owning `GitStore` is still alive.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4778
4779 fn save_buffers<'a>(
4780 &self,
4781 entries: impl IntoIterator<Item = &'a RepoPath>,
4782 cx: &mut Context<Self>,
4783 ) -> Vec<Task<anyhow::Result<()>>> {
4784 let mut save_futures = Vec::new();
4785 if let Some(buffer_store) = self.buffer_store(cx) {
4786 buffer_store.update(cx, |buffer_store, cx| {
4787 for path in entries {
4788 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4789 continue;
4790 };
4791 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4792 && buffer
4793 .read(cx)
4794 .file()
4795 .is_some_and(|file| file.disk_state().exists())
4796 && buffer.read(cx).has_unsaved_edits()
4797 {
4798 save_futures.push(buffer_store.save_buffer(buffer, cx));
4799 }
4800 }
4801 })
4802 }
4803 save_futures
4804 }
4805
4806 pub fn stage_entries(
4807 &mut self,
4808 entries: Vec<RepoPath>,
4809 cx: &mut Context<Self>,
4810 ) -> Task<anyhow::Result<()>> {
4811 self.stage_or_unstage_entries(true, entries, cx)
4812 }
4813
4814 pub fn unstage_entries(
4815 &mut self,
4816 entries: Vec<RepoPath>,
4817 cx: &mut Context<Self>,
4818 ) -> Task<anyhow::Result<()>> {
4819 self.stage_or_unstage_entries(false, entries, cx)
4820 }
4821
    /// Stages (`stage == true`) or unstages the given paths, first saving any
    /// dirty open buffers for them. Optimistically updates each affected
    /// buffer's uncommitted diff so hunk state changes immediately in the UI;
    /// if the underlying git operation fails, those pending hunks are cleared
    /// so the diffs fall back to the real index state.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        // Keyed so index writes for the same paths go through the keyed-job
        // path rather than plain `send_job`.
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush buffer contents to disk before touching the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip all hunks in each affected
                            // buffer's uncommitted diff, and remember each diff
                            // state's operation count so we can later mark the
                            // write as applied (or roll back on failure).
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write, locally or via
                            // the remote host.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Commit or roll back the optimistic hunk updates.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5006
5007 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5008 let to_stage = self
5009 .cached_status()
5010 .filter_map(|entry| {
5011 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
5012 if ops.staging() || ops.staged() {
5013 None
5014 } else {
5015 Some(entry.repo_path)
5016 }
5017 } else if entry.status.staging().is_fully_staged() {
5018 None
5019 } else {
5020 Some(entry.repo_path)
5021 }
5022 })
5023 .collect();
5024 self.stage_or_unstage_entries(true, to_stage, cx)
5025 }
5026
5027 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5028 let to_unstage = self
5029 .cached_status()
5030 .filter_map(|entry| {
5031 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
5032 if !ops.staging() && !ops.staged() {
5033 None
5034 } else {
5035 Some(entry.repo_path)
5036 }
5037 } else if entry.status.staging().is_fully_unstaged() {
5038 None
5039 } else {
5040 Some(entry.repo_path)
5041 }
5042 })
5043 .collect();
5044 self.stage_or_unstage_entries(false, to_unstage, cx)
5045 }
5046
5047 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5048 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5049
5050 self.stash_entries(to_stash, cx)
5051 }
5052
    /// Stashes the given paths (`git stash push -- <paths>`), locally or by
    /// forwarding the request to the remote host.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?`: the job channel was dropped; second: the job itself failed.
            .await??;
            Ok(())
        })
    }
5089
    /// Pops a stash entry (`git stash pop`), applying and removing it.
    /// `index` selects the entry; `None` means the most recent stash.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?`: the job channel was dropped; second: the job itself failed.
            .await??;
            Ok(())
        })
    }
5123
5124 pub fn stash_apply(
5125 &mut self,
5126 index: Option<usize>,
5127 cx: &mut Context<Self>,
5128 ) -> Task<anyhow::Result<()>> {
5129 let id = self.id;
5130 cx.spawn(async move |this, cx| {
5131 this.update(cx, |this, _| {
5132 this.send_job(None, move |git_repo, _cx| async move {
5133 match git_repo {
5134 RepositoryState::Local(LocalRepositoryState {
5135 backend,
5136 environment,
5137 ..
5138 }) => backend.stash_apply(index, environment).await,
5139 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5140 client
5141 .request(proto::StashApply {
5142 project_id: project_id.0,
5143 repository_id: id.to_proto(),
5144 stash_index: index.map(|i| i as u64),
5145 })
5146 .await
5147 .context("sending stash apply request")?;
5148 Ok(())
5149 }
5150 }
5151 })
5152 })?
5153 .await??;
5154 Ok(())
5155 })
5156 }
5157
5158 pub fn stash_drop(
5159 &mut self,
5160 index: Option<usize>,
5161 cx: &mut Context<Self>,
5162 ) -> oneshot::Receiver<anyhow::Result<()>> {
5163 let id = self.id;
5164 let updates_tx = self
5165 .git_store()
5166 .and_then(|git_store| match &git_store.read(cx).state {
5167 GitStoreState::Local { downstream, .. } => downstream
5168 .as_ref()
5169 .map(|downstream| downstream.updates_tx.clone()),
5170 _ => None,
5171 });
5172 let this = cx.weak_entity();
5173 self.send_job(None, move |git_repo, mut cx| async move {
5174 match git_repo {
5175 RepositoryState::Local(LocalRepositoryState {
5176 backend,
5177 environment,
5178 ..
5179 }) => {
5180 // TODO would be nice to not have to do this manually
5181 let result = backend.stash_drop(index, environment).await;
5182 if result.is_ok()
5183 && let Ok(stash_entries) = backend.stash_entries().await
5184 {
5185 let snapshot = this.update(&mut cx, |this, cx| {
5186 this.snapshot.stash_entries = stash_entries;
5187 cx.emit(RepositoryEvent::StashEntriesChanged);
5188 this.snapshot.clone()
5189 })?;
5190 if let Some(updates_tx) = updates_tx {
5191 updates_tx
5192 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5193 .ok();
5194 }
5195 }
5196
5197 result
5198 }
5199 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5200 client
5201 .request(proto::StashDrop {
5202 project_id: project_id.0,
5203 repository_id: id.to_proto(),
5204 stash_index: index.map(|i| i as u64),
5205 })
5206 .await
5207 .context("sending stash pop request")?;
5208 Ok(())
5209 }
5210 }
5211 })
5212 }
5213
5214 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5215 let id = self.id;
5216 self.send_job(
5217 Some(format!("git hook {}", hook.as_str()).into()),
5218 move |git_repo, _cx| async move {
5219 match git_repo {
5220 RepositoryState::Local(LocalRepositoryState {
5221 backend,
5222 environment,
5223 ..
5224 }) => backend.run_hook(hook, environment.clone()).await,
5225 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5226 client
5227 .request(proto::RunGitHook {
5228 project_id: project_id.0,
5229 repository_id: id.to_proto(),
5230 hook: hook.to_proto(),
5231 })
5232 .await?;
5233
5234 Ok(())
5235 }
5236 }
5237 },
5238 )
5239 }
5240
    /// Creates a commit with `message`, running the pre-commit hook first and
    /// aborting if it fails. `name_and_email` overrides the author when set.
    /// For remote repositories, the askpass delegate is registered so the
    /// host can relay credential prompts back to this client.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook before the commit job; both run on the
        // same serial queue, so the hook finishes first.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the pre-commit hook was cancelled or failed.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate for the duration of the
                    // request; the host references it by `askpass_id`.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5295
    /// Runs `git fetch` with the given options, returning the command's
    /// stdout/stderr. For remote repositories the askpass delegate is
    /// registered so the host can relay credential prompts back here.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate for the duration of the
                    // request; the host references it by `askpass_id`.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5337
5338 pub fn push(
5339 &mut self,
5340 branch: SharedString,
5341 remote_branch: SharedString,
5342 remote: SharedString,
5343 options: Option<PushOptions>,
5344 askpass: AskPassDelegate,
5345 cx: &mut Context<Self>,
5346 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5347 let askpass_delegates = self.askpass_delegates.clone();
5348 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5349 let id = self.id;
5350
5351 let args = options
5352 .map(|option| match option {
5353 PushOptions::SetUpstream => " --set-upstream",
5354 PushOptions::Force => " --force-with-lease",
5355 })
5356 .unwrap_or("");
5357
5358 let updates_tx = self
5359 .git_store()
5360 .and_then(|git_store| match &git_store.read(cx).state {
5361 GitStoreState::Local { downstream, .. } => downstream
5362 .as_ref()
5363 .map(|downstream| downstream.updates_tx.clone()),
5364 _ => None,
5365 });
5366
5367 let this = cx.weak_entity();
5368 self.send_job(
5369 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5370 move |git_repo, mut cx| async move {
5371 match git_repo {
5372 RepositoryState::Local(LocalRepositoryState {
5373 backend,
5374 environment,
5375 ..
5376 }) => {
5377 let result = backend
5378 .push(
5379 branch.to_string(),
5380 remote_branch.to_string(),
5381 remote.to_string(),
5382 options,
5383 askpass,
5384 environment.clone(),
5385 cx.clone(),
5386 )
5387 .await;
5388 // TODO would be nice to not have to do this manually
5389 if result.is_ok() {
5390 let branches = backend.branches().await?;
5391 let branch = branches.into_iter().find(|branch| branch.is_head);
5392 log::info!("head branch after scan is {branch:?}");
5393 let snapshot = this.update(&mut cx, |this, cx| {
5394 this.snapshot.branch = branch;
5395 cx.emit(RepositoryEvent::BranchChanged);
5396 this.snapshot.clone()
5397 })?;
5398 if let Some(updates_tx) = updates_tx {
5399 updates_tx
5400 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5401 .ok();
5402 }
5403 }
5404 result
5405 }
5406 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5407 askpass_delegates.lock().insert(askpass_id, askpass);
5408 let _defer = util::defer(|| {
5409 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5410 debug_assert!(askpass_delegate.is_some());
5411 });
5412 let response = client
5413 .request(proto::Push {
5414 project_id: project_id.0,
5415 repository_id: id.to_proto(),
5416 askpass_id,
5417 branch_name: branch.to_string(),
5418 remote_branch_name: remote_branch.to_string(),
5419 remote_name: remote.to_string(),
5420 options: options.map(|options| match options {
5421 PushOptions::Force => proto::push::PushOptions::Force,
5422 PushOptions::SetUpstream => {
5423 proto::push::PushOptions::SetUpstream
5424 }
5425 }
5426 as i32),
5427 })
5428 .await?;
5429
5430 Ok(RemoteCommandOutput {
5431 stdout: response.stdout,
5432 stderr: response.stderr,
5433 })
5434 }
5435 }
5436 },
5437 )
5438 }
5439
5440 pub fn pull(
5441 &mut self,
5442 branch: Option<SharedString>,
5443 remote: SharedString,
5444 rebase: bool,
5445 askpass: AskPassDelegate,
5446 _cx: &mut App,
5447 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5448 let askpass_delegates = self.askpass_delegates.clone();
5449 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5450 let id = self.id;
5451
5452 let mut status = "git pull".to_string();
5453 if rebase {
5454 status.push_str(" --rebase");
5455 }
5456 status.push_str(&format!(" {}", remote));
5457 if let Some(b) = &branch {
5458 status.push_str(&format!(" {}", b));
5459 }
5460
5461 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5462 match git_repo {
5463 RepositoryState::Local(LocalRepositoryState {
5464 backend,
5465 environment,
5466 ..
5467 }) => {
5468 backend
5469 .pull(
5470 branch.as_ref().map(|b| b.to_string()),
5471 remote.to_string(),
5472 rebase,
5473 askpass,
5474 environment.clone(),
5475 cx,
5476 )
5477 .await
5478 }
5479 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5480 askpass_delegates.lock().insert(askpass_id, askpass);
5481 let _defer = util::defer(|| {
5482 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5483 debug_assert!(askpass_delegate.is_some());
5484 });
5485 let response = client
5486 .request(proto::Pull {
5487 project_id: project_id.0,
5488 repository_id: id.to_proto(),
5489 askpass_id,
5490 rebase,
5491 branch_name: branch.as_ref().map(|b| b.to_string()),
5492 remote_name: remote.to_string(),
5493 })
5494 .await?;
5495
5496 Ok(RemoteCommandOutput {
5497 stdout: response.stdout,
5498 stderr: response.stderr,
5499 })
5500 }
5501 }
5502 })
5503 }
5504
    /// Queues a job that writes `content` into the git index for `path`.
    ///
    /// The job is keyed on `GitJobKey::WriteIndex(path)`, so when several
    /// writes for the same path pile up, superseded ones are skipped by the
    /// worker and only the newest runs.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the on-disk file's executable bit in the
                        // new index entry; a missing or unreadable file is
                        // treated as non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                // Record the hunk-staging operation count that this index
                // write corresponds to on the buffer's diff state —
                // presumably so later diff recomputation can distinguish
                // stale writes from fresh ones (TODO confirm against the
                // diff-state consumer).
                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            // `?` inside this closure bails with None when the
                            // buffer or its diff state is gone; that is not an
                            // error for the job itself.
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5583
5584 pub fn create_remote(
5585 &mut self,
5586 remote_name: String,
5587 remote_url: String,
5588 ) -> oneshot::Receiver<Result<()>> {
5589 let id = self.id;
5590 self.send_job(
5591 Some(format!("git remote add {remote_name} {remote_url}").into()),
5592 move |repo, _cx| async move {
5593 match repo {
5594 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5595 backend.create_remote(remote_name, remote_url).await
5596 }
5597 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5598 client
5599 .request(proto::GitCreateRemote {
5600 project_id: project_id.0,
5601 repository_id: id.to_proto(),
5602 remote_name,
5603 remote_url,
5604 })
5605 .await?;
5606
5607 Ok(())
5608 }
5609 }
5610 },
5611 )
5612 }
5613
5614 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5615 let id = self.id;
5616 self.send_job(
5617 Some(format!("git remove remote {remote_name}").into()),
5618 move |repo, _cx| async move {
5619 match repo {
5620 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5621 backend.remove_remote(remote_name).await
5622 }
5623 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5624 client
5625 .request(proto::GitRemoveRemote {
5626 project_id: project_id.0,
5627 repository_id: id.to_proto(),
5628 remote_name,
5629 })
5630 .await?;
5631
5632 Ok(())
5633 }
5634 }
5635 },
5636 )
5637 }
5638
5639 pub fn get_remotes(
5640 &mut self,
5641 branch_name: Option<String>,
5642 is_push: bool,
5643 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5644 let id = self.id;
5645 self.send_job(None, move |repo, _cx| async move {
5646 match repo {
5647 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5648 let remote = if let Some(branch_name) = branch_name {
5649 if is_push {
5650 backend.get_push_remote(branch_name).await?
5651 } else {
5652 backend.get_branch_remote(branch_name).await?
5653 }
5654 } else {
5655 None
5656 };
5657
5658 match remote {
5659 Some(remote) => Ok(vec![remote]),
5660 None => backend.get_all_remotes().await,
5661 }
5662 }
5663 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5664 let response = client
5665 .request(proto::GetRemotes {
5666 project_id: project_id.0,
5667 repository_id: id.to_proto(),
5668 branch_name,
5669 is_push,
5670 })
5671 .await?;
5672
5673 let remotes = response
5674 .remotes
5675 .into_iter()
5676 .map(|remotes| Remote {
5677 name: remotes.name.into(),
5678 })
5679 .collect();
5680
5681 Ok(remotes)
5682 }
5683 }
5684 })
5685 }
5686
5687 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5688 let id = self.id;
5689 self.send_job(None, move |repo, _| async move {
5690 match repo {
5691 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5692 backend.branches().await
5693 }
5694 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5695 let response = client
5696 .request(proto::GitGetBranches {
5697 project_id: project_id.0,
5698 repository_id: id.to_proto(),
5699 })
5700 .await?;
5701
5702 let branches = response
5703 .branches
5704 .into_iter()
5705 .map(|branch| proto_to_branch(&branch))
5706 .collect();
5707
5708 Ok(branches)
5709 }
5710 }
5711 })
5712 }
5713
5714 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5715 let id = self.id;
5716 self.send_job(None, move |repo, _| async move {
5717 match repo {
5718 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5719 backend.worktrees().await
5720 }
5721 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5722 let response = client
5723 .request(proto::GitGetWorktrees {
5724 project_id: project_id.0,
5725 repository_id: id.to_proto(),
5726 })
5727 .await?;
5728
5729 let worktrees = response
5730 .worktrees
5731 .into_iter()
5732 .map(|worktree| proto_to_worktree(&worktree))
5733 .collect();
5734
5735 Ok(worktrees)
5736 }
5737 }
5738 })
5739 }
5740
5741 pub fn create_worktree(
5742 &mut self,
5743 name: String,
5744 directory: PathBuf,
5745 commit: Option<String>,
5746 ) -> oneshot::Receiver<Result<()>> {
5747 let id = self.id;
5748 self.send_job(
5749 Some("git worktree add".into()),
5750 move |repo, _cx| async move {
5751 match repo {
5752 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5753 backend.create_worktree(name, directory, commit).await
5754 }
5755 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5756 client
5757 .request(proto::GitCreateWorktree {
5758 project_id: project_id.0,
5759 repository_id: id.to_proto(),
5760 name,
5761 directory: directory.to_string_lossy().to_string(),
5762 commit,
5763 })
5764 .await?;
5765
5766 Ok(())
5767 }
5768 }
5769 },
5770 )
5771 }
5772
5773 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5774 let id = self.id;
5775 self.send_job(
5776 Some(format!("git worktree remove: {}", path.display()).into()),
5777 move |repo, _cx| async move {
5778 match repo {
5779 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5780 backend.remove_worktree(path, force).await
5781 }
5782 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5783 client
5784 .request(proto::GitRemoveWorktree {
5785 project_id: project_id.0,
5786 repository_id: id.to_proto(),
5787 path: path.to_string_lossy().to_string(),
5788 force,
5789 })
5790 .await?;
5791
5792 Ok(())
5793 }
5794 }
5795 },
5796 )
5797 }
5798
5799 pub fn rename_worktree(
5800 &mut self,
5801 old_path: PathBuf,
5802 new_path: PathBuf,
5803 ) -> oneshot::Receiver<Result<()>> {
5804 let id = self.id;
5805 self.send_job(
5806 Some(format!("git worktree move: {}", old_path.display()).into()),
5807 move |repo, _cx| async move {
5808 match repo {
5809 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5810 backend.rename_worktree(old_path, new_path).await
5811 }
5812 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5813 client
5814 .request(proto::GitRenameWorktree {
5815 project_id: project_id.0,
5816 repository_id: id.to_proto(),
5817 old_path: old_path.to_string_lossy().to_string(),
5818 new_path: new_path.to_string_lossy().to_string(),
5819 })
5820 .await?;
5821
5822 Ok(())
5823 }
5824 }
5825 },
5826 )
5827 }
5828
5829 pub fn default_branch(
5830 &mut self,
5831 include_remote_name: bool,
5832 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5833 let id = self.id;
5834 self.send_job(None, move |repo, _| async move {
5835 match repo {
5836 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5837 backend.default_branch(include_remote_name).await
5838 }
5839 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5840 let response = client
5841 .request(proto::GetDefaultBranch {
5842 project_id: project_id.0,
5843 repository_id: id.to_proto(),
5844 })
5845 .await?;
5846
5847 anyhow::Ok(response.branch.map(SharedString::from))
5848 }
5849 }
5850 })
5851 }
5852
    /// Computes a tree-level diff (per-file add/modify/delete statuses, no
    /// hunks) for the comparison described by `diff_type`.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Convert proto entries back into `TreeDiff` entries.
                    // Malformed entries (missing/unparsable oid or path) are
                    // logged via `log_err` and dropped rather than failing
                    // the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        // Modified/Deleted carry the previous
                                        // blob's oid so callers can load the
                                        // old content.
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
5912
5913 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5914 let id = self.id;
5915 self.send_job(None, move |repo, _cx| async move {
5916 match repo {
5917 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5918 backend.diff(diff_type).await
5919 }
5920 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5921 let (proto_diff_type, merge_base_ref) = match &diff_type {
5922 DiffType::HeadToIndex => {
5923 (proto::git_diff::DiffType::HeadToIndex.into(), None)
5924 }
5925 DiffType::HeadToWorktree => {
5926 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
5927 }
5928 DiffType::MergeBase { base_ref } => (
5929 proto::git_diff::DiffType::MergeBase.into(),
5930 Some(base_ref.to_string()),
5931 ),
5932 };
5933 let response = client
5934 .request(proto::GitDiff {
5935 project_id: project_id.0,
5936 repository_id: id.to_proto(),
5937 diff_type: proto_diff_type,
5938 merge_base_ref,
5939 })
5940 .await?;
5941
5942 Ok(response.diff)
5943 }
5944 }
5945 })
5946 }
5947
    /// Creates branch `branch_name` and switches to it, starting from
    /// `base_branch` when given (`git switch -c`).
    ///
    /// NOTE(review): in the remote arm `base_branch` is silently dropped —
    /// `proto::GitCreateBranch` only carries the new branch name — so a
    /// remote repository presumably branches from its current HEAD
    /// regardless of `base_branch`. Confirm whether that is intentional.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5978
5979 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5980 let id = self.id;
5981 self.send_job(
5982 Some(format!("git switch {branch_name}").into()),
5983 move |repo, _cx| async move {
5984 match repo {
5985 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5986 backend.change_branch(branch_name).await
5987 }
5988 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5989 client
5990 .request(proto::GitChangeBranch {
5991 project_id: project_id.0,
5992 repository_id: id.to_proto(),
5993 branch_name,
5994 })
5995 .await?;
5996
5997 Ok(())
5998 }
5999 }
6000 },
6001 )
6002 }
6003
6004 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6005 let id = self.id;
6006 self.send_job(
6007 Some(format!("git branch -d {branch_name}").into()),
6008 move |repo, _cx| async move {
6009 match repo {
6010 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
6011 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6012 client
6013 .request(proto::GitDeleteBranch {
6014 project_id: project_id.0,
6015 repository_id: id.to_proto(),
6016 branch_name,
6017 })
6018 .await?;
6019
6020 Ok(())
6021 }
6022 }
6023 },
6024 )
6025 }
6026
6027 pub fn rename_branch(
6028 &mut self,
6029 branch: String,
6030 new_name: String,
6031 ) -> oneshot::Receiver<Result<()>> {
6032 let id = self.id;
6033 self.send_job(
6034 Some(format!("git branch -m {branch} {new_name}").into()),
6035 move |repo, _cx| async move {
6036 match repo {
6037 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6038 backend.rename_branch(branch, new_name).await
6039 }
6040 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6041 client
6042 .request(proto::GitRenameBranch {
6043 project_id: project_id.0,
6044 repository_id: id.to_proto(),
6045 branch,
6046 new_name,
6047 })
6048 .await?;
6049
6050 Ok(())
6051 }
6052 }
6053 },
6054 )
6055 }
6056
6057 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6058 let id = self.id;
6059 self.send_job(None, move |repo, _cx| async move {
6060 match repo {
6061 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6062 backend.check_for_pushed_commit().await
6063 }
6064 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6065 let response = client
6066 .request(proto::CheckForPushedCommits {
6067 project_id: project_id.0,
6068 repository_id: id.to_proto(),
6069 })
6070 .await?;
6071
6072 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6073
6074 Ok(branches)
6075 }
6076 }
6077 })
6078 }
6079
6080 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6081 self.send_job(None, |repo, _cx| async move {
6082 match repo {
6083 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6084 backend.checkpoint().await
6085 }
6086 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6087 }
6088 })
6089 }
6090
6091 pub fn restore_checkpoint(
6092 &mut self,
6093 checkpoint: GitRepositoryCheckpoint,
6094 ) -> oneshot::Receiver<Result<()>> {
6095 self.send_job(None, move |repo, _cx| async move {
6096 match repo {
6097 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6098 backend.restore_checkpoint(checkpoint).await
6099 }
6100 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6101 }
6102 })
6103 }
6104
    /// Applies an `UpdateRepository` message received from the upstream
    /// (host) project to this replica's snapshot, emitting the matching
    /// repository events only for state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch / HEAD: compare before overwriting so the event only fires
        // on a real change.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to deserialize are silently skipped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Statuses arrive as a delta: removed paths plus inserted/updated
        // entries. Entries that fail to deserialize are logged (`log_err`)
        // and dropped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only the final message of a batched update carries the new scan id.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6179
6180 pub fn compare_checkpoints(
6181 &mut self,
6182 left: GitRepositoryCheckpoint,
6183 right: GitRepositoryCheckpoint,
6184 ) -> oneshot::Receiver<Result<bool>> {
6185 self.send_job(None, move |repo, _cx| async move {
6186 match repo {
6187 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6188 backend.compare_checkpoints(left, right).await
6189 }
6190 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6191 }
6192 })
6193 }
6194
6195 pub fn diff_checkpoints(
6196 &mut self,
6197 base_checkpoint: GitRepositoryCheckpoint,
6198 target_checkpoint: GitRepositoryCheckpoint,
6199 ) -> oneshot::Receiver<Result<String>> {
6200 self.send_job(None, move |repo, _cx| async move {
6201 match repo {
6202 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6203 backend
6204 .diff_checkpoints(base_checkpoint, target_checkpoint)
6205 .await
6206 }
6207 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6208 }
6209 })
6210 }
6211
    /// Prunes finished pending ops, keeping only paths that still have at
    /// least one running op.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree from entries whose op list, filtered down to
        // still-running ops, is non-empty.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *previous* ops (cloned
            // before the assignment below), not the pruned set — confirm
            // listeners expect the pre-clear state rather than `updated`.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6237
    /// Schedules a full git status rescan on the repository's job queue.
    ///
    /// Keyed on `GitJobKey::ReloadGitState`, so if another scan is queued
    /// behind this one the worker skips this job and lets the newer scan
    /// run instead.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been dropped while the job
                // was queued; that's not an error.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                // Capture the current snapshot (and clear the incremental
                // path queue, since a full scan supersedes it), then compute
                // the new snapshot off the main thread.
                let compute_snapshot = this.update(&mut cx, |this, _| {
                    this.paths_needing_status_update.clear();
                    compute_snapshot(
                        this.id,
                        this.work_directory_abs_path.clone(),
                        this.snapshot.clone(),
                        backend.clone(),
                    )
                });
                let (snapshot, events) = cx.background_spawn(compute_snapshot).await?;
                this.update(&mut cx, |this, cx| {
                    this.snapshot = snapshot.clone();
                    this.clear_pending_ops(cx);
                    for event in events {
                        cx.emit(event);
                    }
                });
                // Forward the fresh snapshot to downstream collaborators,
                // if any are connected.
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6282
    /// Spawns the task that executes queued git jobs for a local repository,
    /// returning the channel on which jobs are submitted.
    ///
    /// Jobs run one at a time, in order. A keyed job is skipped entirely
    /// when another job with the same key is already queued behind it, so
    /// redundant keyed work collapses into the newest request.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // accepting any work.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so we can look ahead for
                // duplicate keys before running the next job.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job when a newer job with the same key is
                    // already waiting behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue drained: block until the next job arrives...
                    jobs.push_back(job);
                } else {
                    // ...or exit when the sender side has been dropped.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6328
    /// Spawns the task that executes queued git jobs for a remote (RPC)
    /// repository, returning the channel on which jobs are submitted.
    ///
    /// Same queueing discipline as `spawn_local_git_worker`: jobs run one at
    /// a time, and a keyed job is skipped when a newer job with the same key
    /// is already queued behind it.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so we can look ahead for
                // duplicate keys before running the next job.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job when a newer job with the same key is
                    // already waiting behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue drained: block until the next job arrives, or
                    // exit when the sender side has been dropped.
                    jobs.push_back(job);
                } else {
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6364
6365 fn load_staged_text(
6366 &mut self,
6367 buffer_id: BufferId,
6368 repo_path: RepoPath,
6369 cx: &App,
6370 ) -> Task<Result<Option<String>>> {
6371 let rx = self.send_job(None, move |state, _| async move {
6372 match state {
6373 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6374 anyhow::Ok(backend.load_index_text(repo_path).await)
6375 }
6376 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6377 let response = client
6378 .request(proto::OpenUnstagedDiff {
6379 project_id: project_id.to_proto(),
6380 buffer_id: buffer_id.to_proto(),
6381 })
6382 .await?;
6383 Ok(response.staged_text)
6384 }
6385 }
6386 });
6387 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6388 }
6389
    /// Loads both the committed (HEAD) and staged (index) contents for
    /// `repo_path`, collapsed into a single `DiffBasesChange`.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // When index and HEAD agree, a single shared base
                    // suffices; otherwise each side is carried separately.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The host signals via `mode` whether index and HEAD
                    // matched, mirroring the local comparison above.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6435
6436 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6437 let repository_id = self.snapshot.id;
6438 let rx = self.send_job(None, move |state, _| async move {
6439 match state {
6440 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6441 backend.load_blob_content(oid).await
6442 }
6443 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6444 let response = client
6445 .request(proto::GetBlobContent {
6446 project_id: project_id.to_proto(),
6447 repository_id: repository_id.0,
6448 oid: oid.to_string(),
6449 })
6450 .await?;
6451 Ok(response.content)
6452 }
6453 }
6454 });
6455 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6456 }
6457
    /// Queues `paths` for a git status refresh and schedules a coalesced job
    /// to recompute statuses, diff stats, and stash entries for them.
    ///
    /// The snapshot is edited in place; when `updates_tx` is provided, the
    /// updated snapshot is forwarded downstream after the refresh.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        // Keyed by `RefreshStatuses` so rapid-fire path changes collapse into
        // a single refresh job rather than queuing one scan per change.
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take all accumulated batches so refreshes queued while this
                // job runs start from an empty list.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten the queued batches and de-duplicate paths.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        // Without a HEAD commit (e.g. freshly initialized
                        // repo) there is nothing to diff against, so report
                        // empty diff stats.
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            // Any path still left in `changed_paths` after
                            // this loop had no reported status, i.e. it
                            // became clean.
                            changed_paths.remove(repo_path);
                            // Skip entries whose status and diff stat are
                            // unchanged from the previous snapshot.
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Remove snapshot entries for paths that no longer
                        // have any git status.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    // Propagate the refreshed snapshot to downstream clients
                    // (e.g. collaborators), if a channel was provided.
                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6570
6571 /// currently running git command and when it started
6572 pub fn current_job(&self) -> Option<JobInfo> {
6573 self.active_jobs.values().next().cloned()
6574 }
6575
6576 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
6577 self.send_job(None, |_, _| async {})
6578 }
6579
    /// Runs async job `f` while recording a pending git operation for each of
    /// `paths`, so per-path in-flight git state can be observed.
    ///
    /// Each path gets a `PendingOp` marked `Running` up front; when `f`
    /// resolves, the ops are marked `Finished` on success, `Skipped` when the
    /// job was canceled, or `Error` on failure. The job's error (if any) is
    /// propagated through the returned task.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // Map the outcome to a terminal op status; cancellation is not
            // surfaced to the caller as an error.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                // Re-read each path's op set, update the matching op, and
                // write all edits back in one batch.
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6619
6620 fn new_pending_ops_for_paths(
6621 &mut self,
6622 paths: Vec<RepoPath>,
6623 git_status: pending_op::GitStatus,
6624 ) -> Vec<(PendingOpId, RepoPath)> {
6625 let mut edits = Vec::with_capacity(paths.len());
6626 let mut ids = Vec::with_capacity(paths.len());
6627 for path in paths {
6628 let mut ops = self
6629 .pending_ops
6630 .get(&PathKey(path.as_ref().clone()), ())
6631 .cloned()
6632 .unwrap_or_else(|| PendingOps::new(&path));
6633 let id = ops.max_id() + 1;
6634 ops.ops.push(PendingOp {
6635 id,
6636 git_status,
6637 job_status: pending_op::JobStatus::Running,
6638 });
6639 edits.push(sum_tree::Edit::Insert(ops));
6640 ids.push((id, path));
6641 }
6642 self.pending_ops.edit(edits, ());
6643 ids
6644 }
6645 pub fn default_remote_url(&self) -> Option<String> {
6646 self.remote_upstream_url
6647 .clone()
6648 .or(self.remote_origin_url.clone())
6649 }
6650}
6651
/// Builds a permalink to the hosted source of a file inside a crate vendored
/// from the Rust registry cache.
///
/// Walks up from `path` looking for the `.cargo_vcs_info.json` that cargo
/// embeds in published crates, reads the repository URL from the adjacent
/// `Cargo.toml`, and combines the recorded commit sha, the crate's path
/// within its VCS tree, and the `selection` line range into a permalink.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal shapes of the two files we read; other fields are ignored.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // `skip(1)` starts the search at the parent directory of `path`.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file path from the registry cache dir to its location in
    // the crate's source repository.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
6702
6703fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6704 let Some(blame) = blame else {
6705 return proto::BlameBufferResponse {
6706 blame_response: None,
6707 };
6708 };
6709
6710 let entries = blame
6711 .entries
6712 .into_iter()
6713 .map(|entry| proto::BlameEntry {
6714 sha: entry.sha.as_bytes().into(),
6715 start_line: entry.range.start,
6716 end_line: entry.range.end,
6717 original_line_number: entry.original_line_number,
6718 author: entry.author,
6719 author_mail: entry.author_mail,
6720 author_time: entry.author_time,
6721 author_tz: entry.author_tz,
6722 committer: entry.committer_name,
6723 committer_mail: entry.committer_email,
6724 committer_time: entry.committer_time,
6725 committer_tz: entry.committer_tz,
6726 summary: entry.summary,
6727 previous: entry.previous,
6728 filename: entry.filename,
6729 })
6730 .collect::<Vec<_>>();
6731
6732 let messages = blame
6733 .messages
6734 .into_iter()
6735 .map(|(oid, message)| proto::CommitMessage {
6736 oid: oid.as_bytes().into(),
6737 message,
6738 })
6739 .collect::<Vec<_>>();
6740
6741 proto::BlameBufferResponse {
6742 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6743 }
6744}
6745
6746fn deserialize_blame_buffer_response(
6747 response: proto::BlameBufferResponse,
6748) -> Option<git::blame::Blame> {
6749 let response = response.blame_response?;
6750 let entries = response
6751 .entries
6752 .into_iter()
6753 .filter_map(|entry| {
6754 Some(git::blame::BlameEntry {
6755 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6756 range: entry.start_line..entry.end_line,
6757 original_line_number: entry.original_line_number,
6758 committer_name: entry.committer,
6759 committer_time: entry.committer_time,
6760 committer_tz: entry.committer_tz,
6761 committer_email: entry.committer_mail,
6762 author: entry.author,
6763 author_mail: entry.author_mail,
6764 author_time: entry.author_time,
6765 author_tz: entry.author_tz,
6766 summary: entry.summary,
6767 previous: entry.previous,
6768 filename: entry.filename,
6769 })
6770 })
6771 .collect::<Vec<_>>();
6772
6773 let messages = response
6774 .messages
6775 .into_iter()
6776 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6777 .collect::<HashMap<_, _>>();
6778
6779 Some(Blame { entries, messages })
6780}
6781
6782fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6783 proto::Branch {
6784 is_head: branch.is_head,
6785 ref_name: branch.ref_name.to_string(),
6786 unix_timestamp: branch
6787 .most_recent_commit
6788 .as_ref()
6789 .map(|commit| commit.commit_timestamp as u64),
6790 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6791 ref_name: upstream.ref_name.to_string(),
6792 tracking: upstream
6793 .tracking
6794 .status()
6795 .map(|upstream| proto::UpstreamTracking {
6796 ahead: upstream.ahead as u64,
6797 behind: upstream.behind as u64,
6798 }),
6799 }),
6800 most_recent_commit: branch
6801 .most_recent_commit
6802 .as_ref()
6803 .map(|commit| proto::CommitSummary {
6804 sha: commit.sha.to_string(),
6805 subject: commit.subject.to_string(),
6806 commit_timestamp: commit.commit_timestamp,
6807 author_name: commit.author_name.to_string(),
6808 }),
6809 }
6810}
6811
6812fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6813 proto::Worktree {
6814 path: worktree.path.to_string_lossy().to_string(),
6815 ref_name: worktree.ref_name.to_string(),
6816 sha: worktree.sha.to_string(),
6817 }
6818}
6819
6820fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6821 git::repository::Worktree {
6822 path: PathBuf::from(proto.path.clone()),
6823 ref_name: proto.ref_name.clone().into(),
6824 sha: proto.sha.clone().into(),
6825 }
6826}
6827
6828fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6829 git::repository::Branch {
6830 is_head: proto.is_head,
6831 ref_name: proto.ref_name.clone().into(),
6832 upstream: proto
6833 .upstream
6834 .as_ref()
6835 .map(|upstream| git::repository::Upstream {
6836 ref_name: upstream.ref_name.to_string().into(),
6837 tracking: upstream
6838 .tracking
6839 .as_ref()
6840 .map(|tracking| {
6841 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6842 ahead: tracking.ahead as u32,
6843 behind: tracking.behind as u32,
6844 })
6845 })
6846 .unwrap_or(git::repository::UpstreamTracking::Gone),
6847 }),
6848 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6849 git::repository::CommitSummary {
6850 sha: commit.sha.to_string().into(),
6851 subject: commit.subject.to_string().into(),
6852 commit_timestamp: commit.commit_timestamp,
6853 author_name: commit.author_name.to_string().into(),
6854 has_parent: true,
6855 }
6856 }),
6857 }
6858}
6859
6860fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6861 proto::GitCommitDetails {
6862 sha: commit.sha.to_string(),
6863 message: commit.message.to_string(),
6864 commit_timestamp: commit.commit_timestamp,
6865 author_email: commit.author_email.to_string(),
6866 author_name: commit.author_name.to_string(),
6867 }
6868}
6869
6870fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6871 CommitDetails {
6872 sha: proto.sha.clone().into(),
6873 message: proto.message.clone().into(),
6874 commit_timestamp: proto.commit_timestamp,
6875 author_email: proto.author_email.clone().into(),
6876 author_name: proto.author_name.clone().into(),
6877 }
6878}
6879
/// Recomputes a full `RepositorySnapshot` by querying the git `backend`,
/// returning it together with the `RepositoryEvent`s that describe what
/// changed relative to `prev_snapshot`.
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Useful when branch is None in detached head state
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    // Without a HEAD commit (e.g. freshly initialized repo) there is nothing
    // to diff against, so fall back to empty diff stats.
    let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> = if head_commit.is_some() {
        backend.diff_stat(&[])
    } else {
        future::ready(Ok(status::GitDiffStat {
            entries: Arc::default(),
        }))
        .boxed()
    };
    // Statuses are requested for "." so they cover the whole repository.
    let (statuses, diff_stats) = futures::future::try_join(
        backend.status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )]),
        diff_stat_future,
    )
    .await?;

    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let stash_entries = backend.stash_entries().await?;
    // Collect conflicted paths while building the status tree; they feed the
    // merge-details update below.
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );
    let mut merge_details = prev_snapshot.merge;
    let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
    log::debug!("new merge details: {merge_details:?}");

    // Emit change events only for aspects that actually differ from the
    // previous snapshot.
    if conflicts_changed || statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged)
    }

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    let remote_origin_url = backend.remote_url("origin").await;
    let remote_upstream_url = backend.remote_url("upstream").await;

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        original_repo_abs_path: prev_snapshot.original_repo_abs_path,
        path_style: prev_snapshot.path_style,
        // Monotonically increasing scan id lets consumers detect staleness.
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}
6961
/// Decodes a wire-format git file status into a `FileStatus`.
///
/// When the structured `status` variant is absent, falls back to decoding
/// the legacy `simple_status` code; the structured form takes precedence
/// when present.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        // Legacy path: only a coarse status code is available, so synthesize
        // the corresponding tracked/unmerged status.
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both heads with the same mapping, then surface the
            // first error (if any) from either side.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Same pattern for index/worktree status codes of tracked files.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7047
7048fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7049 use proto::git_file_status::{Tracked, Unmerged, Variant};
7050
7051 let variant = match status {
7052 FileStatus::Untracked => Variant::Untracked(Default::default()),
7053 FileStatus::Ignored => Variant::Ignored(Default::default()),
7054 FileStatus::Unmerged(UnmergedStatus {
7055 first_head,
7056 second_head,
7057 }) => Variant::Unmerged(Unmerged {
7058 first_head: unmerged_status_to_proto(first_head),
7059 second_head: unmerged_status_to_proto(second_head),
7060 }),
7061 FileStatus::Tracked(TrackedStatus {
7062 index_status,
7063 worktree_status,
7064 }) => Variant::Tracked(Tracked {
7065 index_status: tracked_status_to_proto(index_status),
7066 worktree_status: tracked_status_to_proto(worktree_status),
7067 }),
7068 };
7069 proto::GitFileStatus {
7070 variant: Some(variant),
7071 }
7072}
7073
7074fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7075 match code {
7076 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7077 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7078 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7079 }
7080}
7081
7082fn tracked_status_to_proto(code: StatusCode) -> i32 {
7083 match code {
7084 StatusCode::Added => proto::GitStatus::Added as _,
7085 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7086 StatusCode::Modified => proto::GitStatus::Modified as _,
7087 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7088 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7089 StatusCode::Copied => proto::GitStatus::Copied as _,
7090 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7091 }
7092}