1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for git state across a project: tracks every repository,
/// per-buffer diff state, and (when shared) synchronization with remote peers.
pub struct GitStore {
    // Local vs. remote mode, plus mode-specific handles.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Which worktrees contributed each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    // The repository currently selected as "active", if any.
    active_repo_id: Option<RepositoryId>,
    // In-flight diff loads keyed by buffer and diff kind; `Shared` so that
    // concurrent requests await the same task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diff entities that have been shared with remote collaborators, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Diff entities that have been shared with a remote peer for one buffer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: the diffs, conflict tracking, and cached base texts
/// associated with a single open buffer.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against arbitrary commits; a `None` key holds a diff with no
    // base commit.
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders notified once the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    // Cached base texts backing `oid_diffs`.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts changed and carries their new contents.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to separate values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD both set from a single value.
    SetBoth(Option<String>),
}

/// The kind of diff being loaded or tracked for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    Unstaged,
    Uncommitted,
    SinceOid(Option<git::Oid>),
}
162
/// Mode-specific state for the [`GitStore`]: either local repositories with an
/// optional downstream share, or a remote upstream with an optional
/// downstream re-share.
enum GitStoreState {
    Local {
        next_repository_id: Arc<AtomicU64>,
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// Messages sent to the background task that streams repository state to a
/// downstream client.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Handles for an active downstream share of a local store.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    // Drives the update stream; dropping it stops forwarding.
    _task: Task<Result<()>>,
}

/// A point-in-time checkpoint across all repositories, keyed by each
/// repository's working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// One file's git status within a repository, as stored in the status
/// sum-tree.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    // Summarize by path (for ordering) plus the aggregated git status.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    // Entries are keyed by repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Stable identifier for a repository within this project session.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: per-conflicted-path merge heads and the
/// prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Commit data for the git graph, which may still be loading.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}

/// An immutable view of a repository's observed state.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    // Incremented on every status scan.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
}

/// Monotonic identifier for a queued git job.
type JobId = u64;

/// Metadata about a running git job, for progress display.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
305
/// Background worker that fetches commit data for the git graph on demand,
/// fed via a channel of commit OIDs.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph commit-data worker.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Accumulated initial data for one git-graph query (source + order),
/// together with the task still fetching it.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    // Maps each commit OID to its index in `commit_data`.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of graph data returned to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
329
/// Live handle to a single git repository, wrapping a [`RepositorySnapshot`]
/// plus the job queue and caches used to mutate and query it.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Queue of git operations to run against this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Next job id to hand out.
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily-resolved backend state, shared across all jobs.
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}

impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    // Expose snapshot fields directly on `Repository`.
    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
357
/// Handles for operating on a repository that lives on the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    // Environment (e.g. PATH) resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
364
365impl LocalRepositoryState {
366 async fn new(
367 work_directory_abs_path: Arc<Path>,
368 dot_git_abs_path: Arc<Path>,
369 project_environment: WeakEntity<ProjectEnvironment>,
370 fs: Arc<dyn Fs>,
371 is_trusted: bool,
372 cx: &mut AsyncApp,
373 ) -> anyhow::Result<Self> {
374 let environment = project_environment
375 .update(cx, |project_environment, cx| {
376 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
377 })?
378 .await
379 .unwrap_or_else(|| {
380 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
381 HashMap::default()
382 });
383 let search_paths = environment.get("PATH").map(|val| val.to_owned());
384 let backend = cx
385 .background_spawn({
386 let fs = fs.clone();
387 async move {
388 let system_git_binary_path = search_paths
389 .and_then(|search_paths| {
390 which::which_in("git", Some(search_paths), &work_directory_abs_path)
391 .ok()
392 })
393 .or_else(|| which::which("git").ok());
394 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
395 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
396 }
397 })
398 .await?;
399 backend.set_trusted(is_trusted);
400 Ok(LocalRepositoryState {
401 backend,
402 environment: Arc::new(environment),
403 fs,
404 })
405 }
406}
407
/// Connection details for a repository hosted by a remote collaborator.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// Backing state for a repository: a local on-disk backend, or a proxy to a
/// remote peer.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events emitted while loading the git graph.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}

/// Events emitted by a [`Repository`] when its observed state changes.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Marker event: the set of active jobs changed.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the [`GitStore`] itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of work to run against a repository's backend, optionally tagged
/// with a [`GitJobKey`].
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    key: Option<GitJobKey>,
}

/// Keys identifying categories of queued git jobs.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
467
468impl GitStore {
469 pub fn local(
470 worktree_store: &Entity<WorktreeStore>,
471 buffer_store: Entity<BufferStore>,
472 environment: Entity<ProjectEnvironment>,
473 fs: Arc<dyn Fs>,
474 cx: &mut Context<Self>,
475 ) -> Self {
476 Self::new(
477 worktree_store.clone(),
478 buffer_store,
479 GitStoreState::Local {
480 next_repository_id: Arc::new(AtomicU64::new(1)),
481 downstream: None,
482 project_environment: environment,
483 fs,
484 },
485 cx,
486 )
487 }
488
489 pub fn remote(
490 worktree_store: &Entity<WorktreeStore>,
491 buffer_store: Entity<BufferStore>,
492 upstream_client: AnyProtoClient,
493 project_id: u64,
494 cx: &mut Context<Self>,
495 ) -> Self {
496 Self::new(
497 worktree_store.clone(),
498 buffer_store,
499 GitStoreState::Remote {
500 upstream_client,
501 upstream_project_id: project_id,
502 downstream: None,
503 },
504 cx,
505 )
506 }
507
508 fn new(
509 worktree_store: Entity<WorktreeStore>,
510 buffer_store: Entity<BufferStore>,
511 state: GitStoreState,
512 cx: &mut Context<Self>,
513 ) -> Self {
514 let mut _subscriptions = vec![
515 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
516 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
517 ];
518
519 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
520 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
521 }
522
523 GitStore {
524 state,
525 buffer_store,
526 worktree_store,
527 repositories: HashMap::default(),
528 worktree_ids: HashMap::default(),
529 active_repo_id: None,
530 _subscriptions,
531 loading_diffs: HashMap::default(),
532 shared_diffs: HashMap::default(),
533 diffs: HashMap::default(),
534 }
535 }
536
    /// Registers all git-related RPC message handlers on the given client.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
    }
582
    /// Whether this store manages repositories directly on the local filesystem.
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
586 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
587 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
588 let id = repo.read(cx).id;
589 if self.active_repo_id != Some(id) {
590 self.active_repo_id = Some(id);
591 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
592 }
593 }
594 }
595
    /// Begins forwarding repository state to a downstream client identified by
    /// `project_id`, starting with an initial snapshot of every repository.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Remote store: replay each repository's initial update to the
                // new downstream client, then remember the client.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // Local store: spawn a background task that diffs successive
                // snapshots and streams incremental updates to the client.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the channel with the current state of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send only the
                                            // delta since the last snapshot.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: full initial update.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The send loop ended (channel closed or send failed);
                        // clear the downstream handle so we stop queuing updates.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
676
677 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
678 match &mut self.state {
679 GitStoreState::Local {
680 downstream: downstream_client,
681 ..
682 } => {
683 downstream_client.take();
684 }
685 GitStoreState::Remote {
686 downstream: downstream_client,
687 ..
688 } => {
689 downstream_client.take();
690 }
691 }
692 self.shared_diffs.clear();
693 }
694
    /// Drops all diff state that was shared with the given peer.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
698
699 pub fn active_repository(&self) -> Option<Entity<Repository>> {
700 self.active_repo_id
701 .as_ref()
702 .map(|id| self.repositories[id].clone())
703 }
704
    /// Returns (creating or awaiting if necessary) the diff between the
    /// buffer's contents and its staged (index) text.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: the diff already exists; just wait out any in-progress
        // recalculation before handing it back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads: all callers share the same task.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
759
    /// Returns (creating if necessary) the diff between the buffer's contents
    /// and its blob at commit `oid` (`None` means no base content).
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff for this OID already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load for this (buffer, OID) pair is already in flight; share it.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the blob contents at `oid`; `None` yields no base
                    // text for the diff.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff becomes this diff's secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and register the new diff.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
860
    /// Returns (creating or awaiting if necessary) the diff between the
    /// buffer's contents and its committed text.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff already exists; wait out any in-progress
        // recalculation before returning it.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads: all callers share one task.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
913
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`: records
    /// the loaded base texts, creates the `BufferDiff` entity, and resolves
    /// once the initial diff recalculation completes.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // On load failure, drop the loading entry so a later caller retries.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // The uncommitted diff uses the unstaged diff as its
                        // secondary diff; reuse it or create a placeholder.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                // Resolve once the initial recalculation (if any) finishes.
                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
988
989 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
990 let diff_state = self.diffs.get(&buffer_id)?;
991 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
992 }
993
994 pub fn get_uncommitted_diff(
995 &self,
996 buffer_id: BufferId,
997 cx: &App,
998 ) -> Option<Entity<BufferDiff>> {
999 let diff_state = self.diffs.get(&buffer_id)?;
1000 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1001 }
1002
1003 pub fn get_diff_since_oid(
1004 &self,
1005 buffer_id: BufferId,
1006 oid: Option<git::Oid>,
1007 cx: &App,
1008 ) -> Option<Entity<BufferDiff>> {
1009 let diff_state = self.diffs.get(&buffer_id)?;
1010 diff_state.read(cx).oid_diff(oid)
1011 }
1012
1013 pub fn open_conflict_set(
1014 &mut self,
1015 buffer: Entity<Buffer>,
1016 cx: &mut Context<Self>,
1017 ) -> Entity<ConflictSet> {
1018 log::debug!("open conflict set");
1019 let buffer_id = buffer.read(cx).remote_id();
1020
1021 if let Some(git_state) = self.diffs.get(&buffer_id)
1022 && let Some(conflict_set) = git_state
1023 .read(cx)
1024 .conflict_set
1025 .as_ref()
1026 .and_then(|weak| weak.upgrade())
1027 {
1028 let conflict_set = conflict_set;
1029 let buffer_snapshot = buffer.read(cx).text_snapshot();
1030
1031 git_state.update(cx, |state, cx| {
1032 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1033 });
1034
1035 return conflict_set;
1036 }
1037
1038 let is_unmerged = self
1039 .repository_and_path_for_buffer_id(buffer_id, cx)
1040 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1041 let git_store = cx.weak_entity();
1042 let buffer_git_state = self
1043 .diffs
1044 .entry(buffer_id)
1045 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1046 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1047
1048 self._subscriptions
1049 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1050 cx.emit(GitStoreEvent::ConflictsUpdated);
1051 }));
1052
1053 buffer_git_state.update(cx, |state, cx| {
1054 state.conflict_set = Some(conflict_set.downgrade());
1055 let buffer_snapshot = buffer.read(cx).text_snapshot();
1056 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1057 });
1058
1059 conflict_set
1060 }
1061
1062 pub fn project_path_git_status(
1063 &self,
1064 project_path: &ProjectPath,
1065 cx: &App,
1066 ) -> Option<FileStatus> {
1067 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1068 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1069 }
1070
1071 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
1072 let mut work_directory_abs_paths = Vec::new();
1073 let mut checkpoints = Vec::new();
1074 for repository in self.repositories.values() {
1075 repository.update(cx, |repository, _| {
1076 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
1077 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
1078 });
1079 }
1080
1081 cx.background_executor().spawn(async move {
1082 let checkpoints = future::try_join_all(checkpoints).await?;
1083 Ok(GitStoreCheckpoint {
1084 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
1085 .into_iter()
1086 .zip(checkpoints)
1087 .collect(),
1088 })
1089 })
1090 }
1091
1092 pub fn restore_checkpoint(
1093 &self,
1094 checkpoint: GitStoreCheckpoint,
1095 cx: &mut App,
1096 ) -> Task<Result<()>> {
1097 let repositories_by_work_dir_abs_path = self
1098 .repositories
1099 .values()
1100 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1101 .collect::<HashMap<_, _>>();
1102
1103 let mut tasks = Vec::new();
1104 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1105 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1106 let restore = repository.update(cx, |repository, _| {
1107 repository.restore_checkpoint(checkpoint)
1108 });
1109 tasks.push(async move { restore.await? });
1110 }
1111 }
1112 cx.background_spawn(async move {
1113 future::try_join_all(tasks).await?;
1114 Ok(())
1115 })
1116 }
1117
1118 /// Compares two checkpoints, returning true if they are equal.
1119 pub fn compare_checkpoints(
1120 &self,
1121 left: GitStoreCheckpoint,
1122 mut right: GitStoreCheckpoint,
1123 cx: &mut App,
1124 ) -> Task<Result<bool>> {
1125 let repositories_by_work_dir_abs_path = self
1126 .repositories
1127 .values()
1128 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1129 .collect::<HashMap<_, _>>();
1130
1131 let mut tasks = Vec::new();
1132 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1133 if let Some(right_checkpoint) = right
1134 .checkpoints_by_work_dir_abs_path
1135 .remove(&work_dir_abs_path)
1136 {
1137 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1138 {
1139 let compare = repository.update(cx, |repository, _| {
1140 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1141 });
1142
1143 tasks.push(async move { compare.await? });
1144 }
1145 } else {
1146 return Task::ready(Ok(false));
1147 }
1148 }
1149 cx.background_spawn(async move {
1150 Ok(future::try_join_all(tasks)
1151 .await?
1152 .into_iter()
1153 .all(|result| result))
1154 })
1155 }
1156
    /// Blames a buffer.
    ///
    /// When `version` is `Some`, the blame is computed against the buffer's
    /// content at that version; otherwise the current content is used. Errors
    /// if the buffer does not belong to any tracked git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Capture the text (at the requested version, if any) before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                // Local repository: run the blame against the git backend directly.
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                // Remote repository: ask the host to compute the blame for us.
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1203
1204 pub fn file_history(
1205 &self,
1206 repo: &Entity<Repository>,
1207 path: RepoPath,
1208 cx: &mut App,
1209 ) -> Task<Result<git::repository::FileHistory>> {
1210 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1211
1212 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1213 }
1214
1215 pub fn file_history_paginated(
1216 &self,
1217 repo: &Entity<Repository>,
1218 path: RepoPath,
1219 skip: usize,
1220 limit: Option<usize>,
1221 cx: &mut App,
1222 ) -> Task<Result<git::repository::FileHistory>> {
1223 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1224
1225 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1226 }
1227
    /// Builds a permalink URL to `selection` in the given buffer, using the
    /// repository's hosting provider, the branch's upstream remote (falling
    /// back to "origin"), and the current HEAD SHA.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Local repository: resolve the remote URL and HEAD SHA,
                    // then let the hosting provider construct the link.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Remote repository: delegate permalink construction to the host.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1312
1313 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1314 match &self.state {
1315 GitStoreState::Local {
1316 downstream: downstream_client,
1317 ..
1318 } => downstream_client
1319 .as_ref()
1320 .map(|state| (state.client.clone(), state.project_id)),
1321 GitStoreState::Remote {
1322 downstream: downstream_client,
1323 ..
1324 } => downstream_client.clone(),
1325 }
1326 }
1327
1328 fn upstream_client(&self) -> Option<AnyProtoClient> {
1329 match &self.state {
1330 GitStoreState::Local { .. } => None,
1331 GitStoreState::Remote {
1332 upstream_client, ..
1333 } => Some(upstream_client.clone()),
1334 }
1335 }
1336
    /// Dispatches worktree-store events: notifies repositories of changed
    /// paths, registers/updates repositories when a worktree's git
    /// repositories change, and prunes repositories when a worktree is
    /// removed. Remote stores ignore these events entirely.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        // Only local stores track worktrees directly.
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by owning repository, then
                    // notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees don't contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Drop this worktree from every repository's worktree set and
                // collect the repositories left with no worktrees at all.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                // Remove orphaned repositories and tell downstream peers.
                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                if is_active_repo_removed {
                    // Fall back to any remaining repository as the active one.
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Responds to a repository change event: refreshes conflict tracking for
    /// every open buffer belonging to that repository, then re-emits the
    /// event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Sync the conflict flag with the repository's current
                        // state for this path.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse markers when the flag actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1482
    /// Re-emits a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1486
1487 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1488 fn update_repositories_from_worktree(
1489 &mut self,
1490 worktree_id: WorktreeId,
1491 project_environment: Entity<ProjectEnvironment>,
1492 next_repository_id: Arc<AtomicU64>,
1493 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1494 updated_git_repositories: UpdatedGitRepositoriesSet,
1495 fs: Arc<dyn Fs>,
1496 cx: &mut Context<Self>,
1497 ) {
1498 let mut removed_ids = Vec::new();
1499 for update in updated_git_repositories.iter() {
1500 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1501 let existing_work_directory_abs_path =
1502 repo.read(cx).work_directory_abs_path.clone();
1503 Some(&existing_work_directory_abs_path)
1504 == update.old_work_directory_abs_path.as_ref()
1505 || Some(&existing_work_directory_abs_path)
1506 == update.new_work_directory_abs_path.as_ref()
1507 }) {
1508 let repo_id = *id;
1509 if let Some(new_work_directory_abs_path) =
1510 update.new_work_directory_abs_path.clone()
1511 {
1512 self.worktree_ids
1513 .entry(repo_id)
1514 .or_insert_with(HashSet::new)
1515 .insert(worktree_id);
1516 existing.update(cx, |existing, cx| {
1517 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1518 existing.schedule_scan(updates_tx.clone(), cx);
1519 });
1520 } else {
1521 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1522 worktree_ids.remove(&worktree_id);
1523 if worktree_ids.is_empty() {
1524 removed_ids.push(repo_id);
1525 }
1526 }
1527 }
1528 } else if let UpdatedGitRepository {
1529 new_work_directory_abs_path: Some(work_directory_abs_path),
1530 dot_git_abs_path: Some(dot_git_abs_path),
1531 repository_dir_abs_path: Some(_repository_dir_abs_path),
1532 common_dir_abs_path: Some(common_dir_abs_path),
1533 ..
1534 } = update
1535 {
1536 let original_repo_abs_path: Arc<Path> =
1537 git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
1538 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1539 let is_trusted = TrustedWorktrees::try_get_global(cx)
1540 .map(|trusted_worktrees| {
1541 trusted_worktrees.update(cx, |trusted_worktrees, cx| {
1542 trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
1543 })
1544 })
1545 .unwrap_or(false);
1546 let git_store = cx.weak_entity();
1547 let repo = cx.new(|cx| {
1548 let mut repo = Repository::local(
1549 id,
1550 work_directory_abs_path.clone(),
1551 original_repo_abs_path.clone(),
1552 dot_git_abs_path.clone(),
1553 project_environment.downgrade(),
1554 fs.clone(),
1555 is_trusted,
1556 git_store,
1557 cx,
1558 );
1559 if let Some(updates_tx) = updates_tx.as_ref() {
1560 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1561 updates_tx
1562 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1563 .ok();
1564 }
1565 repo.schedule_scan(updates_tx.clone(), cx);
1566 repo
1567 });
1568 self._subscriptions
1569 .push(cx.subscribe(&repo, Self::on_repository_event));
1570 self._subscriptions
1571 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1572 self.repositories.insert(id, repo);
1573 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1574 cx.emit(GitStoreEvent::RepositoryAdded);
1575 self.active_repo_id.get_or_insert_with(|| {
1576 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1577 id
1578 });
1579 }
1580 }
1581
1582 for id in removed_ids {
1583 if self.active_repo_id == Some(id) {
1584 self.active_repo_id = None;
1585 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1586 }
1587 self.repositories.remove(&id);
1588 if let Some(updates_tx) = updates_tx.as_ref() {
1589 updates_tx
1590 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1591 .ok();
1592 }
1593 }
1594 }
1595
1596 fn on_trusted_worktrees_event(
1597 &mut self,
1598 _: Entity<TrustedWorktreesStore>,
1599 event: &TrustedWorktreesEvent,
1600 cx: &mut Context<Self>,
1601 ) {
1602 if !matches!(self.state, GitStoreState::Local { .. }) {
1603 return;
1604 }
1605
1606 let (is_trusted, event_paths) = match event {
1607 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1608 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1609 };
1610
1611 for (repo_id, worktree_ids) in &self.worktree_ids {
1612 if worktree_ids
1613 .iter()
1614 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1615 {
1616 if let Some(repo) = self.repositories.get(repo_id) {
1617 let repository_state = repo.read(cx).repository_state.clone();
1618 cx.background_spawn(async move {
1619 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1620 state.backend.set_trusted(is_trusted);
1621 }
1622 })
1623 .detach();
1624 }
1625 }
1626 }
1627 }
1628
    /// Keeps per-buffer git state in sync with buffer lifecycle events from
    /// the buffer store.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch for language changes so language-dependent diff state
                // can be refreshed.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Discard all diff state for the dropped buffer.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Best-effort: failures are logged rather than propagated.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1701
    /// Triggers recalculation of the diffs and conflict markers for `buffers`,
    /// returning a future that resolves once all recalculations complete.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                // Kick off the diff recalculation and collect its completion future.
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                // Also reparse conflict markers against the same snapshot.
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1727
    /// Reacts to hunks being staged or unstaged in a buffer diff by writing
    /// the new index text to the repository. On failure, pending hunk state is
    /// cleared and an `IndexWriteError` event is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Count this operation so stale index writes can be detected.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // On index-write failure, roll back pending hunk state
                        // and surface the error as a store event.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1767
1768 fn local_worktree_git_repos_changed(
1769 &mut self,
1770 worktree: Entity<Worktree>,
1771 changed_repos: &UpdatedGitRepositoriesSet,
1772 cx: &mut Context<Self>,
1773 ) {
1774 log::debug!("local worktree repos changed");
1775 debug_assert!(worktree.read(cx).is_local());
1776
1777 for repository in self.repositories.values() {
1778 repository.update(cx, |repository, cx| {
1779 let repo_abs_path = &repository.work_directory_abs_path;
1780 if changed_repos.iter().any(|update| {
1781 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1782 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1783 }) {
1784 repository.reload_buffer_diff_bases(cx);
1785 }
1786 });
1787 }
1788 }
1789
    /// All repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1793
1794 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1795 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1796 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1797 Some(status.status)
1798 }
1799
1800 pub fn repository_and_path_for_buffer_id(
1801 &self,
1802 buffer_id: BufferId,
1803 cx: &App,
1804 ) -> Option<(Entity<Repository>, RepoPath)> {
1805 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1806 let project_path = buffer.read(cx).project_path(cx)?;
1807 self.repository_and_path_for_project_path(&project_path, cx)
1808 }
1809
1810 pub fn repository_and_path_for_project_path(
1811 &self,
1812 path: &ProjectPath,
1813 cx: &App,
1814 ) -> Option<(Entity<Repository>, RepoPath)> {
1815 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1816 self.repositories
1817 .values()
1818 .filter_map(|repo| {
1819 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1820 Some((repo.clone(), repo_path))
1821 })
1822 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1823 }
1824
1825 pub fn git_init(
1826 &self,
1827 path: Arc<Path>,
1828 fallback_branch_name: String,
1829 cx: &App,
1830 ) -> Task<Result<()>> {
1831 match &self.state {
1832 GitStoreState::Local { fs, .. } => {
1833 let fs = fs.clone();
1834 cx.background_executor()
1835 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1836 }
1837 GitStoreState::Remote {
1838 upstream_client,
1839 upstream_project_id: project_id,
1840 ..
1841 } => {
1842 let client = upstream_client.clone();
1843 let project_id = *project_id;
1844 cx.background_executor().spawn(async move {
1845 client
1846 .request(proto::GitInit {
1847 project_id: project_id,
1848 abs_path: path.to_string_lossy().into_owned(),
1849 fallback_branch_name,
1850 })
1851 .await?;
1852 Ok(())
1853 })
1854 }
1855 }
1856 }
1857
1858 pub fn git_clone(
1859 &self,
1860 repo: String,
1861 path: impl Into<Arc<std::path::Path>>,
1862 cx: &App,
1863 ) -> Task<Result<()>> {
1864 let path = path.into();
1865 match &self.state {
1866 GitStoreState::Local { fs, .. } => {
1867 let fs = fs.clone();
1868 cx.background_executor()
1869 .spawn(async move { fs.git_clone(&repo, &path).await })
1870 }
1871 GitStoreState::Remote {
1872 upstream_client,
1873 upstream_project_id,
1874 ..
1875 } => {
1876 if upstream_client.is_via_collab() {
1877 return Task::ready(Err(anyhow!(
1878 "Git Clone isn't supported for project guests"
1879 )));
1880 }
1881 let request = upstream_client.request(proto::GitClone {
1882 project_id: *upstream_project_id,
1883 abs_path: path.to_string_lossy().into_owned(),
1884 remote_repo: repo,
1885 });
1886
1887 cx.background_spawn(async move {
1888 let result = request.await?;
1889
1890 match result.success {
1891 true => Ok(()),
1892 false => Err(anyhow!("Git Clone failed")),
1893 }
1894 })
1895 }
1896 }
1897 }
1898
    /// Handles an `UpdateRepository` message from the upstream host: creates
    /// the remote repository entity on first sight, applies the update to it,
    /// and re-forwards the message to any downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Lazily create the repository entity the first time we hear
            // about this id.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository we hear about becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward the update to any downstream collaborators.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1954
1955 async fn handle_remove_repository(
1956 this: Entity<Self>,
1957 envelope: TypedEnvelope<proto::RemoveRepository>,
1958 mut cx: AsyncApp,
1959 ) -> Result<()> {
1960 this.update(&mut cx, |this, cx| {
1961 let mut update = envelope.payload;
1962 let id = RepositoryId::from_proto(update.id);
1963 this.repositories.remove(&id);
1964 if let Some((client, project_id)) = this.downstream_client() {
1965 update.project_id = project_id.to_proto();
1966 client.send(update).log_err();
1967 }
1968 if this.active_repo_id == Some(id) {
1969 this.active_repo_id = None;
1970 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1971 }
1972 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1973 });
1974 Ok(())
1975 }
1976
1977 async fn handle_git_init(
1978 this: Entity<Self>,
1979 envelope: TypedEnvelope<proto::GitInit>,
1980 cx: AsyncApp,
1981 ) -> Result<proto::Ack> {
1982 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1983 let name = envelope.payload.fallback_branch_name;
1984 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1985 .await?;
1986
1987 Ok(proto::Ack {})
1988 }
1989
1990 async fn handle_git_clone(
1991 this: Entity<Self>,
1992 envelope: TypedEnvelope<proto::GitClone>,
1993 cx: AsyncApp,
1994 ) -> Result<proto::GitCloneResponse> {
1995 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1996 let repo_name = envelope.payload.remote_repo;
1997 let result = cx
1998 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
1999 .await;
2000
2001 Ok(proto::GitCloneResponse {
2002 success: result.is_ok(),
2003 })
2004 }
2005
2006 async fn handle_fetch(
2007 this: Entity<Self>,
2008 envelope: TypedEnvelope<proto::Fetch>,
2009 mut cx: AsyncApp,
2010 ) -> Result<proto::RemoteMessageResponse> {
2011 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2012 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2013 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2014 let askpass_id = envelope.payload.askpass_id;
2015
2016 let askpass = make_remote_delegate(
2017 this,
2018 envelope.payload.project_id,
2019 repository_id,
2020 askpass_id,
2021 &mut cx,
2022 );
2023
2024 let remote_output = repository_handle
2025 .update(&mut cx, |repository_handle, cx| {
2026 repository_handle.fetch(fetch_options, askpass, cx)
2027 })
2028 .await??;
2029
2030 Ok(proto::RemoteMessageResponse {
2031 stdout: remote_output.stdout,
2032 stderr: remote_output.stderr,
2033 })
2034 }
2035
2036 async fn handle_push(
2037 this: Entity<Self>,
2038 envelope: TypedEnvelope<proto::Push>,
2039 mut cx: AsyncApp,
2040 ) -> Result<proto::RemoteMessageResponse> {
2041 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2042 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2043
2044 let askpass_id = envelope.payload.askpass_id;
2045 let askpass = make_remote_delegate(
2046 this,
2047 envelope.payload.project_id,
2048 repository_id,
2049 askpass_id,
2050 &mut cx,
2051 );
2052
2053 let options = envelope
2054 .payload
2055 .options
2056 .as_ref()
2057 .map(|_| match envelope.payload.options() {
2058 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
2059 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
2060 });
2061
2062 let branch_name = envelope.payload.branch_name.into();
2063 let remote_branch_name = envelope.payload.remote_branch_name.into();
2064 let remote_name = envelope.payload.remote_name.into();
2065
2066 let remote_output = repository_handle
2067 .update(&mut cx, |repository_handle, cx| {
2068 repository_handle.push(
2069 branch_name,
2070 remote_branch_name,
2071 remote_name,
2072 options,
2073 askpass,
2074 cx,
2075 )
2076 })
2077 .await??;
2078 Ok(proto::RemoteMessageResponse {
2079 stdout: remote_output.stdout,
2080 stderr: remote_output.stderr,
2081 })
2082 }
2083
2084 async fn handle_pull(
2085 this: Entity<Self>,
2086 envelope: TypedEnvelope<proto::Pull>,
2087 mut cx: AsyncApp,
2088 ) -> Result<proto::RemoteMessageResponse> {
2089 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2090 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2091 let askpass_id = envelope.payload.askpass_id;
2092 let askpass = make_remote_delegate(
2093 this,
2094 envelope.payload.project_id,
2095 repository_id,
2096 askpass_id,
2097 &mut cx,
2098 );
2099
2100 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2101 let remote_name = envelope.payload.remote_name.into();
2102 let rebase = envelope.payload.rebase;
2103
2104 let remote_message = repository_handle
2105 .update(&mut cx, |repository_handle, cx| {
2106 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2107 })
2108 .await??;
2109
2110 Ok(proto::RemoteMessageResponse {
2111 stdout: remote_message.stdout,
2112 stderr: remote_message.stderr,
2113 })
2114 }
2115
2116 async fn handle_stage(
2117 this: Entity<Self>,
2118 envelope: TypedEnvelope<proto::Stage>,
2119 mut cx: AsyncApp,
2120 ) -> Result<proto::Ack> {
2121 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2122 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2123
2124 let entries = envelope
2125 .payload
2126 .paths
2127 .into_iter()
2128 .map(|path| RepoPath::new(&path))
2129 .collect::<Result<Vec<_>>>()?;
2130
2131 repository_handle
2132 .update(&mut cx, |repository_handle, cx| {
2133 repository_handle.stage_entries(entries, cx)
2134 })
2135 .await?;
2136 Ok(proto::Ack {})
2137 }
2138
2139 async fn handle_unstage(
2140 this: Entity<Self>,
2141 envelope: TypedEnvelope<proto::Unstage>,
2142 mut cx: AsyncApp,
2143 ) -> Result<proto::Ack> {
2144 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2145 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2146
2147 let entries = envelope
2148 .payload
2149 .paths
2150 .into_iter()
2151 .map(|path| RepoPath::new(&path))
2152 .collect::<Result<Vec<_>>>()?;
2153
2154 repository_handle
2155 .update(&mut cx, |repository_handle, cx| {
2156 repository_handle.unstage_entries(entries, cx)
2157 })
2158 .await?;
2159
2160 Ok(proto::Ack {})
2161 }
2162
2163 async fn handle_stash(
2164 this: Entity<Self>,
2165 envelope: TypedEnvelope<proto::Stash>,
2166 mut cx: AsyncApp,
2167 ) -> Result<proto::Ack> {
2168 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2169 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2170
2171 let entries = envelope
2172 .payload
2173 .paths
2174 .into_iter()
2175 .map(|path| RepoPath::new(&path))
2176 .collect::<Result<Vec<_>>>()?;
2177
2178 repository_handle
2179 .update(&mut cx, |repository_handle, cx| {
2180 repository_handle.stash_entries(entries, cx)
2181 })
2182 .await?;
2183
2184 Ok(proto::Ack {})
2185 }
2186
2187 async fn handle_stash_pop(
2188 this: Entity<Self>,
2189 envelope: TypedEnvelope<proto::StashPop>,
2190 mut cx: AsyncApp,
2191 ) -> Result<proto::Ack> {
2192 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2193 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2194 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2195
2196 repository_handle
2197 .update(&mut cx, |repository_handle, cx| {
2198 repository_handle.stash_pop(stash_index, cx)
2199 })
2200 .await?;
2201
2202 Ok(proto::Ack {})
2203 }
2204
2205 async fn handle_stash_apply(
2206 this: Entity<Self>,
2207 envelope: TypedEnvelope<proto::StashApply>,
2208 mut cx: AsyncApp,
2209 ) -> Result<proto::Ack> {
2210 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2211 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2212 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2213
2214 repository_handle
2215 .update(&mut cx, |repository_handle, cx| {
2216 repository_handle.stash_apply(stash_index, cx)
2217 })
2218 .await?;
2219
2220 Ok(proto::Ack {})
2221 }
2222
2223 async fn handle_stash_drop(
2224 this: Entity<Self>,
2225 envelope: TypedEnvelope<proto::StashDrop>,
2226 mut cx: AsyncApp,
2227 ) -> Result<proto::Ack> {
2228 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2229 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2230 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2231
2232 repository_handle
2233 .update(&mut cx, |repository_handle, cx| {
2234 repository_handle.stash_drop(stash_index, cx)
2235 })
2236 .await??;
2237
2238 Ok(proto::Ack {})
2239 }
2240
2241 async fn handle_set_index_text(
2242 this: Entity<Self>,
2243 envelope: TypedEnvelope<proto::SetIndexText>,
2244 mut cx: AsyncApp,
2245 ) -> Result<proto::Ack> {
2246 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2247 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2248 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2249
2250 repository_handle
2251 .update(&mut cx, |repository_handle, cx| {
2252 repository_handle.spawn_set_index_text_job(
2253 repo_path,
2254 envelope.payload.text,
2255 None,
2256 cx,
2257 )
2258 })
2259 .await??;
2260 Ok(proto::Ack {})
2261 }
2262
2263 async fn handle_run_hook(
2264 this: Entity<Self>,
2265 envelope: TypedEnvelope<proto::RunGitHook>,
2266 mut cx: AsyncApp,
2267 ) -> Result<proto::Ack> {
2268 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2269 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2270 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2271 repository_handle
2272 .update(&mut cx, |repository_handle, cx| {
2273 repository_handle.run_hook(hook, cx)
2274 })
2275 .await??;
2276 Ok(proto::Ack {})
2277 }
2278
2279 async fn handle_commit(
2280 this: Entity<Self>,
2281 envelope: TypedEnvelope<proto::Commit>,
2282 mut cx: AsyncApp,
2283 ) -> Result<proto::Ack> {
2284 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2285 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2286 let askpass_id = envelope.payload.askpass_id;
2287
2288 let askpass = make_remote_delegate(
2289 this,
2290 envelope.payload.project_id,
2291 repository_id,
2292 askpass_id,
2293 &mut cx,
2294 );
2295
2296 let message = SharedString::from(envelope.payload.message);
2297 let name = envelope.payload.name.map(SharedString::from);
2298 let email = envelope.payload.email.map(SharedString::from);
2299 let options = envelope.payload.options.unwrap_or_default();
2300
2301 repository_handle
2302 .update(&mut cx, |repository_handle, cx| {
2303 repository_handle.commit(
2304 message,
2305 name.zip(email),
2306 CommitOptions {
2307 amend: options.amend,
2308 signoff: options.signoff,
2309 },
2310 askpass,
2311 cx,
2312 )
2313 })
2314 .await??;
2315 Ok(proto::Ack {})
2316 }
2317
2318 async fn handle_get_remotes(
2319 this: Entity<Self>,
2320 envelope: TypedEnvelope<proto::GetRemotes>,
2321 mut cx: AsyncApp,
2322 ) -> Result<proto::GetRemotesResponse> {
2323 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2324 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2325
2326 let branch_name = envelope.payload.branch_name;
2327 let is_push = envelope.payload.is_push;
2328
2329 let remotes = repository_handle
2330 .update(&mut cx, |repository_handle, _| {
2331 repository_handle.get_remotes(branch_name, is_push)
2332 })
2333 .await??;
2334
2335 Ok(proto::GetRemotesResponse {
2336 remotes: remotes
2337 .into_iter()
2338 .map(|remotes| proto::get_remotes_response::Remote {
2339 name: remotes.name.to_string(),
2340 })
2341 .collect::<Vec<_>>(),
2342 })
2343 }
2344
2345 async fn handle_get_worktrees(
2346 this: Entity<Self>,
2347 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2348 mut cx: AsyncApp,
2349 ) -> Result<proto::GitWorktreesResponse> {
2350 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2351 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2352
2353 let worktrees = repository_handle
2354 .update(&mut cx, |repository_handle, _| {
2355 repository_handle.worktrees()
2356 })
2357 .await??;
2358
2359 Ok(proto::GitWorktreesResponse {
2360 worktrees: worktrees
2361 .into_iter()
2362 .map(|worktree| worktree_to_proto(&worktree))
2363 .collect::<Vec<_>>(),
2364 })
2365 }
2366
2367 async fn handle_create_worktree(
2368 this: Entity<Self>,
2369 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2370 mut cx: AsyncApp,
2371 ) -> Result<proto::Ack> {
2372 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2373 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2374 let directory = PathBuf::from(envelope.payload.directory);
2375 let name = envelope.payload.name;
2376 let commit = envelope.payload.commit;
2377
2378 repository_handle
2379 .update(&mut cx, |repository_handle, _| {
2380 repository_handle.create_worktree(name, directory, commit)
2381 })
2382 .await??;
2383
2384 Ok(proto::Ack {})
2385 }
2386
2387 async fn handle_get_branches(
2388 this: Entity<Self>,
2389 envelope: TypedEnvelope<proto::GitGetBranches>,
2390 mut cx: AsyncApp,
2391 ) -> Result<proto::GitBranchesResponse> {
2392 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2393 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2394
2395 let branches = repository_handle
2396 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2397 .await??;
2398
2399 Ok(proto::GitBranchesResponse {
2400 branches: branches
2401 .into_iter()
2402 .map(|branch| branch_to_proto(&branch))
2403 .collect::<Vec<_>>(),
2404 })
2405 }
2406 async fn handle_get_default_branch(
2407 this: Entity<Self>,
2408 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2409 mut cx: AsyncApp,
2410 ) -> Result<proto::GetDefaultBranchResponse> {
2411 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2412 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2413
2414 let branch = repository_handle
2415 .update(&mut cx, |repository_handle, _| {
2416 repository_handle.default_branch(false)
2417 })
2418 .await??
2419 .map(Into::into);
2420
2421 Ok(proto::GetDefaultBranchResponse { branch })
2422 }
2423 async fn handle_create_branch(
2424 this: Entity<Self>,
2425 envelope: TypedEnvelope<proto::GitCreateBranch>,
2426 mut cx: AsyncApp,
2427 ) -> Result<proto::Ack> {
2428 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2429 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2430 let branch_name = envelope.payload.branch_name;
2431
2432 repository_handle
2433 .update(&mut cx, |repository_handle, _| {
2434 repository_handle.create_branch(branch_name, None)
2435 })
2436 .await??;
2437
2438 Ok(proto::Ack {})
2439 }
2440
2441 async fn handle_change_branch(
2442 this: Entity<Self>,
2443 envelope: TypedEnvelope<proto::GitChangeBranch>,
2444 mut cx: AsyncApp,
2445 ) -> Result<proto::Ack> {
2446 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2447 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2448 let branch_name = envelope.payload.branch_name;
2449
2450 repository_handle
2451 .update(&mut cx, |repository_handle, _| {
2452 repository_handle.change_branch(branch_name)
2453 })
2454 .await??;
2455
2456 Ok(proto::Ack {})
2457 }
2458
2459 async fn handle_rename_branch(
2460 this: Entity<Self>,
2461 envelope: TypedEnvelope<proto::GitRenameBranch>,
2462 mut cx: AsyncApp,
2463 ) -> Result<proto::Ack> {
2464 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2465 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2466 let branch = envelope.payload.branch;
2467 let new_name = envelope.payload.new_name;
2468
2469 repository_handle
2470 .update(&mut cx, |repository_handle, _| {
2471 repository_handle.rename_branch(branch, new_name)
2472 })
2473 .await??;
2474
2475 Ok(proto::Ack {})
2476 }
2477
2478 async fn handle_create_remote(
2479 this: Entity<Self>,
2480 envelope: TypedEnvelope<proto::GitCreateRemote>,
2481 mut cx: AsyncApp,
2482 ) -> Result<proto::Ack> {
2483 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2484 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2485 let remote_name = envelope.payload.remote_name;
2486 let remote_url = envelope.payload.remote_url;
2487
2488 repository_handle
2489 .update(&mut cx, |repository_handle, _| {
2490 repository_handle.create_remote(remote_name, remote_url)
2491 })
2492 .await??;
2493
2494 Ok(proto::Ack {})
2495 }
2496
2497 async fn handle_delete_branch(
2498 this: Entity<Self>,
2499 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2500 mut cx: AsyncApp,
2501 ) -> Result<proto::Ack> {
2502 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2503 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2504 let branch_name = envelope.payload.branch_name;
2505
2506 repository_handle
2507 .update(&mut cx, |repository_handle, _| {
2508 repository_handle.delete_branch(branch_name)
2509 })
2510 .await??;
2511
2512 Ok(proto::Ack {})
2513 }
2514
2515 async fn handle_remove_remote(
2516 this: Entity<Self>,
2517 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2518 mut cx: AsyncApp,
2519 ) -> Result<proto::Ack> {
2520 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2521 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2522 let remote_name = envelope.payload.remote_name;
2523
2524 repository_handle
2525 .update(&mut cx, |repository_handle, _| {
2526 repository_handle.remove_remote(remote_name)
2527 })
2528 .await??;
2529
2530 Ok(proto::Ack {})
2531 }
2532
2533 async fn handle_show(
2534 this: Entity<Self>,
2535 envelope: TypedEnvelope<proto::GitShow>,
2536 mut cx: AsyncApp,
2537 ) -> Result<proto::GitCommitDetails> {
2538 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2539 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2540
2541 let commit = repository_handle
2542 .update(&mut cx, |repository_handle, _| {
2543 repository_handle.show(envelope.payload.commit)
2544 })
2545 .await??;
2546 Ok(proto::GitCommitDetails {
2547 sha: commit.sha.into(),
2548 message: commit.message.into(),
2549 commit_timestamp: commit.commit_timestamp,
2550 author_email: commit.author_email.into(),
2551 author_name: commit.author_name.into(),
2552 })
2553 }
2554
2555 async fn handle_load_commit_diff(
2556 this: Entity<Self>,
2557 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2558 mut cx: AsyncApp,
2559 ) -> Result<proto::LoadCommitDiffResponse> {
2560 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2561 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2562
2563 let commit_diff = repository_handle
2564 .update(&mut cx, |repository_handle, _| {
2565 repository_handle.load_commit_diff(envelope.payload.commit)
2566 })
2567 .await??;
2568 Ok(proto::LoadCommitDiffResponse {
2569 files: commit_diff
2570 .files
2571 .into_iter()
2572 .map(|file| proto::CommitFile {
2573 path: file.path.to_proto(),
2574 old_text: file.old_text,
2575 new_text: file.new_text,
2576 is_binary: file.is_binary,
2577 })
2578 .collect(),
2579 })
2580 }
2581
2582 async fn handle_file_history(
2583 this: Entity<Self>,
2584 envelope: TypedEnvelope<proto::GitFileHistory>,
2585 mut cx: AsyncApp,
2586 ) -> Result<proto::GitFileHistoryResponse> {
2587 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2588 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2589 let path = RepoPath::from_proto(&envelope.payload.path)?;
2590 let skip = envelope.payload.skip as usize;
2591 let limit = envelope.payload.limit.map(|l| l as usize);
2592
2593 let file_history = repository_handle
2594 .update(&mut cx, |repository_handle, _| {
2595 repository_handle.file_history_paginated(path, skip, limit)
2596 })
2597 .await??;
2598
2599 Ok(proto::GitFileHistoryResponse {
2600 entries: file_history
2601 .entries
2602 .into_iter()
2603 .map(|entry| proto::FileHistoryEntry {
2604 sha: entry.sha.to_string(),
2605 subject: entry.subject.to_string(),
2606 message: entry.message.to_string(),
2607 commit_timestamp: entry.commit_timestamp,
2608 author_name: entry.author_name.to_string(),
2609 author_email: entry.author_email.to_string(),
2610 })
2611 .collect(),
2612 path: file_history.path.to_proto(),
2613 })
2614 }
2615
2616 async fn handle_reset(
2617 this: Entity<Self>,
2618 envelope: TypedEnvelope<proto::GitReset>,
2619 mut cx: AsyncApp,
2620 ) -> Result<proto::Ack> {
2621 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2622 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2623
2624 let mode = match envelope.payload.mode() {
2625 git_reset::ResetMode::Soft => ResetMode::Soft,
2626 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2627 };
2628
2629 repository_handle
2630 .update(&mut cx, |repository_handle, cx| {
2631 repository_handle.reset(envelope.payload.commit, mode, cx)
2632 })
2633 .await??;
2634 Ok(proto::Ack {})
2635 }
2636
2637 async fn handle_checkout_files(
2638 this: Entity<Self>,
2639 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2640 mut cx: AsyncApp,
2641 ) -> Result<proto::Ack> {
2642 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2643 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2644 let paths = envelope
2645 .payload
2646 .paths
2647 .iter()
2648 .map(|s| RepoPath::from_proto(s))
2649 .collect::<Result<Vec<_>>>()?;
2650
2651 repository_handle
2652 .update(&mut cx, |repository_handle, cx| {
2653 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2654 })
2655 .await?;
2656 Ok(proto::Ack {})
2657 }
2658
    /// Opens the commit-message buffer for a repository and replicates it to
    /// the requesting peer, returning the buffer's remote id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Replicate the buffer to the peer that asked for it, preferring the
        // original sender id (present when the request was forwarded) over the
        // direct sender.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    // Replication happens in the background; failures are
                    // logged rather than failing this request.
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2689
    /// Answers an askpass prompt relayed from a remote peer: looks up the
    /// delegate registered under `askpass_id`, asks it for the password, and
    /// re-registers it so it can answer subsequent prompts.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map so the mutex guard is not held
        // across the `ask_password` await below.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back for any further prompts from the same session.
        // Note: if the prompt was cancelled above, the delegate is dropped.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2718
2719 async fn handle_check_for_pushed_commits(
2720 this: Entity<Self>,
2721 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2722 mut cx: AsyncApp,
2723 ) -> Result<proto::CheckForPushedCommitsResponse> {
2724 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2725 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2726
2727 let branches = repository_handle
2728 .update(&mut cx, |repository_handle, _| {
2729 repository_handle.check_for_pushed_commits()
2730 })
2731 .await??;
2732 Ok(proto::CheckForPushedCommitsResponse {
2733 pushed_to: branches
2734 .into_iter()
2735 .map(|commit| commit.to_string())
2736 .collect(),
2737 })
2738 }
2739
2740 async fn handle_git_diff(
2741 this: Entity<Self>,
2742 envelope: TypedEnvelope<proto::GitDiff>,
2743 mut cx: AsyncApp,
2744 ) -> Result<proto::GitDiffResponse> {
2745 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2746 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2747 let diff_type = match envelope.payload.diff_type() {
2748 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2749 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2750 proto::git_diff::DiffType::MergeBase => {
2751 let base_ref = envelope
2752 .payload
2753 .merge_base_ref
2754 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2755 DiffType::MergeBase {
2756 base_ref: base_ref.into(),
2757 }
2758 }
2759 };
2760
2761 let mut diff = repository_handle
2762 .update(&mut cx, |repository_handle, cx| {
2763 repository_handle.diff(diff_type, cx)
2764 })
2765 .await??;
2766 const ONE_MB: usize = 1_000_000;
2767 if diff.len() > ONE_MB {
2768 diff = diff.chars().take(ONE_MB).collect()
2769 }
2770
2771 Ok(proto::GitDiffResponse { diff })
2772 }
2773
2774 async fn handle_tree_diff(
2775 this: Entity<Self>,
2776 request: TypedEnvelope<proto::GetTreeDiff>,
2777 mut cx: AsyncApp,
2778 ) -> Result<proto::GetTreeDiffResponse> {
2779 let repository_id = RepositoryId(request.payload.repository_id);
2780 let diff_type = if request.payload.is_merge {
2781 DiffTreeType::MergeBase {
2782 base: request.payload.base.into(),
2783 head: request.payload.head.into(),
2784 }
2785 } else {
2786 DiffTreeType::Since {
2787 base: request.payload.base.into(),
2788 head: request.payload.head.into(),
2789 }
2790 };
2791
2792 let diff = this
2793 .update(&mut cx, |this, cx| {
2794 let repository = this.repositories().get(&repository_id)?;
2795 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2796 })
2797 .context("missing repository")?
2798 .await??;
2799
2800 Ok(proto::GetTreeDiffResponse {
2801 entries: diff
2802 .entries
2803 .into_iter()
2804 .map(|(path, status)| proto::TreeDiffStatus {
2805 path: path.as_ref().to_proto(),
2806 status: match status {
2807 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2808 TreeDiffStatus::Modified { .. } => {
2809 proto::tree_diff_status::Status::Modified.into()
2810 }
2811 TreeDiffStatus::Deleted { .. } => {
2812 proto::tree_diff_status::Status::Deleted.into()
2813 }
2814 },
2815 oid: match status {
2816 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2817 Some(old.to_string())
2818 }
2819 TreeDiffStatus::Added => None,
2820 },
2821 })
2822 .collect(),
2823 })
2824 }
2825
2826 async fn handle_get_blob_content(
2827 this: Entity<Self>,
2828 request: TypedEnvelope<proto::GetBlobContent>,
2829 mut cx: AsyncApp,
2830 ) -> Result<proto::GetBlobContentResponse> {
2831 let oid = git::Oid::from_str(&request.payload.oid)?;
2832 let repository_id = RepositoryId(request.payload.repository_id);
2833 let content = this
2834 .update(&mut cx, |this, cx| {
2835 let repository = this.repositories().get(&repository_id)?;
2836 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2837 })
2838 .context("missing repository")?
2839 .await?;
2840 Ok(proto::GetBlobContentResponse { content })
2841 }
2842
2843 async fn handle_open_unstaged_diff(
2844 this: Entity<Self>,
2845 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2846 mut cx: AsyncApp,
2847 ) -> Result<proto::OpenUnstagedDiffResponse> {
2848 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2849 let diff = this
2850 .update(&mut cx, |this, cx| {
2851 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2852 Some(this.open_unstaged_diff(buffer, cx))
2853 })
2854 .context("missing buffer")?
2855 .await?;
2856 this.update(&mut cx, |this, _| {
2857 let shared_diffs = this
2858 .shared_diffs
2859 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2860 .or_default();
2861 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2862 });
2863 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
2864 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2865 }
2866
    /// Handles an `OpenUncommittedDiff` request: opens the uncommitted diff
    /// for a buffer, records it as shared with the requesting peer, and
    /// returns the base texts the peer needs to reconstruct it.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Record the diff as shared with the requesting peer.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff compares against the index; its base text,
            // when it exists, is the staged snapshot.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            // Decide which base texts to send back:
            // - IndexMatchesHead: index and HEAD share the same base-text
            //   snapshot, so only the committed text is sent.
            // - IndexAndHead: both texts are sent; either may be None when
            //   the corresponding base text does not exist.
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Same snapshot — avoid sending the identical text twice.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2927
2928 async fn handle_update_diff_bases(
2929 this: Entity<Self>,
2930 request: TypedEnvelope<proto::UpdateDiffBases>,
2931 mut cx: AsyncApp,
2932 ) -> Result<()> {
2933 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2934 this.update(&mut cx, |this, cx| {
2935 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2936 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2937 {
2938 let buffer = buffer.read(cx).text_snapshot();
2939 diff_state.update(cx, |diff_state, cx| {
2940 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2941 })
2942 }
2943 });
2944 Ok(())
2945 }
2946
2947 async fn handle_blame_buffer(
2948 this: Entity<Self>,
2949 envelope: TypedEnvelope<proto::BlameBuffer>,
2950 mut cx: AsyncApp,
2951 ) -> Result<proto::BlameBufferResponse> {
2952 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2953 let version = deserialize_version(&envelope.payload.version);
2954 let buffer = this.read_with(&cx, |this, cx| {
2955 this.buffer_store.read(cx).get_existing(buffer_id)
2956 })?;
2957 buffer
2958 .update(&mut cx, |buffer, _| {
2959 buffer.wait_for_version(version.clone())
2960 })
2961 .await?;
2962 let blame = this
2963 .update(&mut cx, |this, cx| {
2964 this.blame_buffer(&buffer, Some(version), cx)
2965 })
2966 .await?;
2967 Ok(serialize_blame_buffer_response(blame))
2968 }
2969
2970 async fn handle_get_permalink_to_line(
2971 this: Entity<Self>,
2972 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2973 mut cx: AsyncApp,
2974 ) -> Result<proto::GetPermalinkToLineResponse> {
2975 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2976 // let version = deserialize_version(&envelope.payload.version);
2977 let selection = {
2978 let proto_selection = envelope
2979 .payload
2980 .selection
2981 .context("no selection to get permalink for defined")?;
2982 proto_selection.start as u32..proto_selection.end as u32
2983 };
2984 let buffer = this.read_with(&cx, |this, cx| {
2985 this.buffer_store.read(cx).get_existing(buffer_id)
2986 })?;
2987 let permalink = this
2988 .update(&mut cx, |this, cx| {
2989 this.get_permalink_to_line(&buffer, selection, cx)
2990 })
2991 .await?;
2992 Ok(proto::GetPermalinkToLineResponse {
2993 permalink: permalink.to_string(),
2994 })
2995 }
2996
2997 fn repository_for_request(
2998 this: &Entity<Self>,
2999 id: RepositoryId,
3000 cx: &mut AsyncApp,
3001 ) -> Result<Entity<Repository>> {
3002 this.read_with(cx, |this, _| {
3003 this.repositories
3004 .get(&id)
3005 .context("missing repository handle")
3006 .cloned()
3007 })
3008 }
3009
3010 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3011 self.repositories
3012 .iter()
3013 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3014 .collect()
3015 }
3016
    /// Maps a batch of updated worktree entries to the repositories that
    /// contain them, producing repo-relative paths grouped by repository.
    /// The matching runs on the background executor, and each path is
    /// attributed only to its innermost containing repository.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorting lets each repository claim its paths as one contiguous range.
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            // (`repo_paths` was iterated in reverse above, so `tasks` already
            // yields more deeply nested work directories first.)
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    // Skip paths already claimed by a more deeply nested repo.
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3096}
3097
3098impl BufferGitState {
3099 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3100 Self {
3101 unstaged_diff: Default::default(),
3102 uncommitted_diff: Default::default(),
3103 oid_diffs: Default::default(),
3104 recalculate_diff_task: Default::default(),
3105 language: Default::default(),
3106 language_registry: Default::default(),
3107 recalculating_tx: postage::watch::channel_with(false).0,
3108 hunk_staging_operation_count: 0,
3109 hunk_staging_operation_count_as_of_write: 0,
3110 head_text: Default::default(),
3111 index_text: Default::default(),
3112 oid_texts: Default::default(),
3113 head_changed: Default::default(),
3114 index_changed: Default::default(),
3115 language_changed: Default::default(),
3116 conflict_updated_futures: Default::default(),
3117 conflict_set: Default::default(),
3118 reparse_conflict_markers_task: Default::default(),
3119 }
3120 }
3121
3122 #[ztracing::instrument(skip_all)]
3123 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3124 self.language = buffer.read(cx).language().cloned();
3125 self.language_changed = true;
3126 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3127 }
3128
    /// Re-scans `buffer` for git conflict markers and updates the associated
    /// [`ConflictSet`], if any.
    ///
    /// Returns a receiver that resolves once the conflict set has been
    /// updated. If there is no live conflict set, or it is not currently
    /// marked as conflicted, no work is scheduled and the receiver's sender
    /// is dropped (so awaiting it yields `Canceled`).
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when the buffer previously contained conflicts;
        // otherwise there is nothing to compare the new parse against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff on the background executor; only the result
                // is applied back on the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake every caller that requested a reparse, not just
                    // the one that spawned this task.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3180
3181 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3182 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3183 }
3184
3185 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3186 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3187 }
3188
3189 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3190 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3191 }
3192
3193 fn handle_base_texts_updated(
3194 &mut self,
3195 buffer: text::BufferSnapshot,
3196 message: proto::UpdateDiffBases,
3197 cx: &mut Context<Self>,
3198 ) {
3199 use proto::update_diff_bases::Mode;
3200
3201 let Some(mode) = Mode::from_i32(message.mode) else {
3202 return;
3203 };
3204
3205 let diff_bases_change = match mode {
3206 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3207 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3208 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3209 Mode::IndexAndHead => DiffBasesChange::SetEach {
3210 index: message.staged_text,
3211 head: message.committed_text,
3212 },
3213 };
3214
3215 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3216 }
3217
3218 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3219 if *self.recalculating_tx.borrow() {
3220 let mut rx = self.recalculating_tx.subscribe();
3221 Some(async move {
3222 loop {
3223 let is_recalculating = rx.recv().await;
3224 if is_recalculating != Some(true) {
3225 break;
3226 }
3227 }
3228 })
3229 } else {
3230 None
3231 }
3232 }
3233
3234 fn diff_bases_changed(
3235 &mut self,
3236 buffer: text::BufferSnapshot,
3237 diff_bases_change: Option<DiffBasesChange>,
3238 cx: &mut Context<Self>,
3239 ) {
3240 match diff_bases_change {
3241 Some(DiffBasesChange::SetIndex(index)) => {
3242 self.index_text = index.map(|mut index| {
3243 text::LineEnding::normalize(&mut index);
3244 Arc::from(index.as_str())
3245 });
3246 self.index_changed = true;
3247 }
3248 Some(DiffBasesChange::SetHead(head)) => {
3249 self.head_text = head.map(|mut head| {
3250 text::LineEnding::normalize(&mut head);
3251 Arc::from(head.as_str())
3252 });
3253 self.head_changed = true;
3254 }
3255 Some(DiffBasesChange::SetBoth(text)) => {
3256 let text = text.map(|mut text| {
3257 text::LineEnding::normalize(&mut text);
3258 Arc::from(text.as_str())
3259 });
3260 self.head_text = text.clone();
3261 self.index_text = text;
3262 self.head_changed = true;
3263 self.index_changed = true;
3264 }
3265 Some(DiffBasesChange::SetEach { index, head }) => {
3266 self.index_text = index.map(|mut index| {
3267 text::LineEnding::normalize(&mut index);
3268 Arc::from(index.as_str())
3269 });
3270 self.index_changed = true;
3271 self.head_text = head.map(|mut head| {
3272 text::LineEnding::normalize(&mut head);
3273 Arc::from(head.as_str())
3274 });
3275 self.head_changed = true;
3276 }
3277 None => {}
3278 }
3279
3280 self.recalculate_diffs(buffer, cx)
3281 }
3282
3283 #[ztracing::instrument(skip_all)]
3284 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3285 *self.recalculating_tx.borrow_mut() = true;
3286
3287 let language = self.language.clone();
3288 let language_registry = self.language_registry.clone();
3289 let unstaged_diff = self.unstaged_diff();
3290 let uncommitted_diff = self.uncommitted_diff();
3291 let head = self.head_text.clone();
3292 let index = self.index_text.clone();
3293 let index_changed = self.index_changed;
3294 let head_changed = self.head_changed;
3295 let language_changed = self.language_changed;
3296 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3297 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3298 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3299 (None, None) => true,
3300 _ => false,
3301 };
3302
3303 let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
3304 .oid_diffs
3305 .iter()
3306 .filter_map(|(oid, weak)| {
3307 let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
3308 weak.upgrade().map(|diff| (*oid, diff, base_text))
3309 })
3310 .collect();
3311
3312 self.oid_diffs.retain(|oid, weak| {
3313 let alive = weak.upgrade().is_some();
3314 if !alive {
3315 if let Some(oid) = oid {
3316 self.oid_texts.remove(oid);
3317 }
3318 }
3319 alive
3320 });
3321 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3322 log::debug!(
3323 "start recalculating diffs for buffer {}",
3324 buffer.remote_id()
3325 );
3326
3327 let mut new_unstaged_diff = None;
3328 if let Some(unstaged_diff) = &unstaged_diff {
3329 new_unstaged_diff = Some(
3330 cx.update(|cx| {
3331 unstaged_diff.read(cx).update_diff(
3332 buffer.clone(),
3333 index,
3334 index_changed.then_some(false),
3335 language.clone(),
3336 cx,
3337 )
3338 })
3339 .await,
3340 );
3341 }
3342
3343 // Dropping BufferDiff can be expensive, so yield back to the event loop
3344 // for a bit
3345 yield_now().await;
3346
3347 let mut new_uncommitted_diff = None;
3348 if let Some(uncommitted_diff) = &uncommitted_diff {
3349 new_uncommitted_diff = if index_matches_head {
3350 new_unstaged_diff.clone()
3351 } else {
3352 Some(
3353 cx.update(|cx| {
3354 uncommitted_diff.read(cx).update_diff(
3355 buffer.clone(),
3356 head,
3357 head_changed.then_some(true),
3358 language.clone(),
3359 cx,
3360 )
3361 })
3362 .await,
3363 )
3364 }
3365 }
3366
3367 // Dropping BufferDiff can be expensive, so yield back to the event loop
3368 // for a bit
3369 yield_now().await;
3370
3371 let cancel = this.update(cx, |this, _| {
3372 // This checks whether all pending stage/unstage operations
3373 // have quiesced (i.e. both the corresponding write and the
3374 // read of that write have completed). If not, then we cancel
3375 // this recalculation attempt to avoid invalidating pending
3376 // state too quickly; another recalculation will come along
3377 // later and clear the pending state once the state of the index has settled.
3378 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3379 *this.recalculating_tx.borrow_mut() = false;
3380 true
3381 } else {
3382 false
3383 }
3384 })?;
3385 if cancel {
3386 log::debug!(
3387 concat!(
3388 "aborting recalculating diffs for buffer {}",
3389 "due to subsequent hunk operations",
3390 ),
3391 buffer.remote_id()
3392 );
3393 return Ok(());
3394 }
3395
3396 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3397 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3398 {
3399 let task = unstaged_diff.update(cx, |diff, cx| {
3400 // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
3401 // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
3402 // view multibuffers.
3403 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3404 });
3405 Some(task.await)
3406 } else {
3407 None
3408 };
3409
3410 yield_now().await;
3411
3412 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3413 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3414 {
3415 uncommitted_diff
3416 .update(cx, |diff, cx| {
3417 if language_changed {
3418 diff.language_changed(language.clone(), language_registry, cx);
3419 }
3420 diff.set_snapshot_with_secondary(
3421 new_uncommitted_diff,
3422 &buffer,
3423 unstaged_changed_range.flatten(),
3424 true,
3425 cx,
3426 )
3427 })
3428 .await;
3429 }
3430
3431 yield_now().await;
3432
3433 for (oid, oid_diff, base_text) in oid_diffs {
3434 let new_oid_diff = cx
3435 .update(|cx| {
3436 oid_diff.read(cx).update_diff(
3437 buffer.clone(),
3438 base_text,
3439 None,
3440 language.clone(),
3441 cx,
3442 )
3443 })
3444 .await;
3445
3446 oid_diff
3447 .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
3448 .await;
3449
3450 log::debug!(
3451 "finished recalculating oid diff for buffer {} oid {:?}",
3452 buffer.remote_id(),
3453 oid
3454 );
3455
3456 yield_now().await;
3457 }
3458
3459 log::debug!(
3460 "finished recalculating diffs for buffer {}",
3461 buffer.remote_id()
3462 );
3463
3464 if let Some(this) = this.upgrade() {
3465 this.update(cx, |this, _| {
3466 this.index_changed = false;
3467 this.head_changed = false;
3468 this.language_changed = false;
3469 *this.recalculating_tx.borrow_mut() = false;
3470 });
3471 }
3472
3473 Ok(())
3474 }));
3475 }
3476}
3477
3478fn make_remote_delegate(
3479 this: Entity<GitStore>,
3480 project_id: u64,
3481 repository_id: RepositoryId,
3482 askpass_id: u64,
3483 cx: &mut AsyncApp,
3484) -> AskPassDelegate {
3485 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3486 this.update(cx, |this, cx| {
3487 let Some((client, _)) = this.downstream_client() else {
3488 return;
3489 };
3490 let response = client.request(proto::AskPassRequest {
3491 project_id,
3492 repository_id: repository_id.to_proto(),
3493 askpass_id,
3494 prompt,
3495 });
3496 cx.spawn(async move |_, _| {
3497 let mut response = response.await?.response;
3498 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3499 .ok();
3500 response.zeroize();
3501 anyhow::Ok(())
3502 })
3503 .detach_and_log_err(cx);
3504 });
3505 })
3506}
3507
3508impl RepositoryId {
3509 pub fn to_proto(self) -> u64 {
3510 self.0
3511 }
3512
3513 pub fn from_proto(id: u64) -> Self {
3514 RepositoryId(id)
3515 }
3516}
3517
3518impl RepositorySnapshot {
3519 fn empty(
3520 id: RepositoryId,
3521 work_directory_abs_path: Arc<Path>,
3522 original_repo_abs_path: Option<Arc<Path>>,
3523 path_style: PathStyle,
3524 ) -> Self {
3525 Self {
3526 id,
3527 statuses_by_path: Default::default(),
3528 original_repo_abs_path: original_repo_abs_path
3529 .unwrap_or_else(|| work_directory_abs_path.clone()),
3530 work_directory_abs_path,
3531 branch: None,
3532 head_commit: None,
3533 scan_id: 0,
3534 merge: Default::default(),
3535 remote_origin_url: None,
3536 remote_upstream_url: None,
3537 stash_entries: Default::default(),
3538 path_style,
3539 }
3540 }
3541
3542 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3543 proto::UpdateRepository {
3544 branch_summary: self.branch.as_ref().map(branch_to_proto),
3545 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3546 updated_statuses: self
3547 .statuses_by_path
3548 .iter()
3549 .map(|entry| entry.to_proto())
3550 .collect(),
3551 removed_statuses: Default::default(),
3552 current_merge_conflicts: self
3553 .merge
3554 .merge_heads_by_conflicted_path
3555 .iter()
3556 .map(|(repo_path, _)| repo_path.to_proto())
3557 .collect(),
3558 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3559 project_id,
3560 id: self.id.to_proto(),
3561 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3562 entry_ids: vec![self.id.to_proto()],
3563 scan_id: self.scan_id,
3564 is_last_update: true,
3565 stash_entries: self
3566 .stash_entries
3567 .entries
3568 .iter()
3569 .map(stash_to_proto)
3570 .collect(),
3571 remote_upstream_url: self.remote_upstream_url.clone(),
3572 remote_origin_url: self.remote_origin_url.clone(),
3573 original_repo_abs_path: Some(
3574 self.original_repo_abs_path.to_string_lossy().into_owned(),
3575 ),
3576 }
3577 }
3578
    /// Computes an incremental `proto::UpdateRepository` by diffing this
    /// snapshot's statuses against a previous snapshot `old`.
    ///
    /// Both status lists are sorted by repo path, so a single merge-style
    /// walk classifies each entry as added/changed (`updated_statuses`) or
    /// removed (`removed_statuses`).
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Present only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Present in both: include only if something changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Present only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
        }
    }
3654
3655 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3656 self.statuses_by_path.iter().cloned()
3657 }
3658
3659 pub fn status_summary(&self) -> GitSummary {
3660 self.statuses_by_path.summary().item_summary
3661 }
3662
3663 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3664 self.statuses_by_path
3665 .get(&PathKey(path.as_ref().clone()), ())
3666 .cloned()
3667 }
3668
3669 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
3670 self.statuses_by_path
3671 .get(&PathKey(path.as_ref().clone()), ())
3672 .and_then(|entry| entry.diff_stat)
3673 }
3674
3675 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3676 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3677 }
3678
3679 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3680 self.path_style
3681 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3682 .unwrap()
3683 .into()
3684 }
3685
3686 #[inline]
3687 fn abs_path_to_repo_path_inner(
3688 work_directory_abs_path: &Path,
3689 abs_path: &Path,
3690 path_style: PathStyle,
3691 ) -> Option<RepoPath> {
3692 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3693 Some(RepoPath::from_rel_path(&rel_path))
3694 }
3695
3696 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3697 self.merge
3698 .merge_heads_by_conflicted_path
3699 .contains_key(repo_path)
3700 }
3701
3702 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3703 let had_conflict_on_last_merge_head_change = self
3704 .merge
3705 .merge_heads_by_conflicted_path
3706 .contains_key(repo_path);
3707 let has_conflict_currently = self
3708 .status_for_path(repo_path)
3709 .is_some_and(|entry| entry.status.is_conflicted());
3710 had_conflict_on_last_merge_head_change || has_conflict_currently
3711 }
3712
3713 /// This is the name that will be displayed in the repository selector for this repository.
3714 pub fn display_name(&self) -> SharedString {
3715 self.work_directory_abs_path
3716 .file_name()
3717 .unwrap_or_default()
3718 .to_string_lossy()
3719 .to_string()
3720 .into()
3721 }
3722}
3723
3724pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3725 proto::StashEntry {
3726 oid: entry.oid.as_bytes().to_vec(),
3727 message: entry.message.clone(),
3728 branch: entry.branch.clone(),
3729 index: entry.index as u64,
3730 timestamp: entry.timestamp,
3731 }
3732}
3733
3734pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3735 Ok(StashEntry {
3736 oid: Oid::from_bytes(&entry.oid)?,
3737 message: entry.message.clone(),
3738 index: entry.index as usize,
3739 branch: entry.branch.clone(),
3740 timestamp: entry.timestamp,
3741 })
3742}
3743
3744impl MergeDetails {
3745 async fn update(
3746 &mut self,
3747 backend: &Arc<dyn GitRepository>,
3748 current_conflicted_paths: Vec<RepoPath>,
3749 ) -> Result<bool> {
3750 log::debug!("load merge details");
3751 self.message = backend.merge_message().await.map(SharedString::from);
3752 let heads = backend
3753 .revparse_batch(vec![
3754 "MERGE_HEAD".into(),
3755 "CHERRY_PICK_HEAD".into(),
3756 "REBASE_HEAD".into(),
3757 "REVERT_HEAD".into(),
3758 "APPLY_HEAD".into(),
3759 ])
3760 .await
3761 .log_err()
3762 .unwrap_or_default()
3763 .into_iter()
3764 .map(|opt| opt.map(SharedString::from))
3765 .collect::<Vec<_>>();
3766
3767 let mut conflicts_changed = false;
3768
3769 // Record the merge state for newly conflicted paths
3770 for path in ¤t_conflicted_paths {
3771 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3772 conflicts_changed = true;
3773 self.merge_heads_by_conflicted_path
3774 .insert(path.clone(), heads.clone());
3775 }
3776 }
3777
3778 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3779 self.merge_heads_by_conflicted_path
3780 .retain(|path, old_merge_heads| {
3781 let keep = current_conflicted_paths.contains(path)
3782 || (old_merge_heads == &heads
3783 && old_merge_heads.iter().any(|head| head.is_some()));
3784 if !keep {
3785 conflicts_changed = true;
3786 }
3787 keep
3788 });
3789
3790 Ok(conflicts_changed)
3791 }
3792}
3793
3794impl Repository {
3795 pub fn is_trusted(&self) -> bool {
3796 match self.repository_state.peek() {
3797 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3798 _ => false,
3799 }
3800 }
3801
3802 pub fn snapshot(&self) -> RepositorySnapshot {
3803 self.snapshot.clone()
3804 }
3805
3806 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3807 self.pending_ops.iter().cloned()
3808 }
3809
3810 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3811 self.pending_ops.summary().clone()
3812 }
3813
3814 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3815 self.pending_ops
3816 .get(&PathKey(path.as_ref().clone()), ())
3817 .cloned()
3818 }
3819
    /// Constructs a repository backed by an on-disk git repo.
    ///
    /// The backend initializes asynchronously: `repository_state` is a
    /// shared future so every queued job can await the same initialization,
    /// and the local git worker is spawned immediately so jobs can be queued
    /// before initialization completes. Init errors are stringified so the
    /// shared future's output is cloneable.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Stringify so the result is cloneable through the shared future.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Drop cached initial graph data when the branch changes, except
        // while `scan_id <= 1` (NOTE(review): presumably the initial scan —
        // confirm against the scanner).
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3887
    /// Constructs a repository whose git operations are proxied over RPC to
    /// a remote host that owns the actual working directory.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike `local`, the remote state needs no async initialization and
        // is available immediately.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3925
3926 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3927 self.git_store.upgrade()
3928 }
3929
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository and pushes any changes into the per-buffer diff
    /// state (and to the downstream client, when one is connected).
    ///
    /// Keyed on [`GitJobKey::ReloadBufferDiffBases`] so redundant requests
    /// can be coalesced by the git worker queue.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Collect, for each open buffer in this repo, its repo path
                // and the currently cached base texts — but only for diffs
                // that still have live observers.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Off the main thread: reload each base text from git and
                // classify what, if anything, changed for each buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Back on the main thread: forward the new bases downstream
                // and apply them locally.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4089
    /// Enqueues `job` on the repository's serialized git worker, without a
    /// coalescing key.
    ///
    /// `status`, when provided, is shown as an active-job message while the
    /// job runs. The receiver yields the job's result, or `Canceled` if the
    /// job is dropped before completing.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4102
    /// Enqueues `job` on the git worker queue, tagged with an optional `key`
    /// (NOTE(review): key-based coalescing lives in the worker that drains
    /// the queue, not here).
    ///
    /// While the job runs, `status` (if any) is recorded in `active_jobs` so
    /// the UI can display a progress message, and removed on completion.
    /// The returned receiver yields the job's result.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may have been dropped; that's fine.
                        result_tx.send(result).ok();
                    })
                }),
            })
            // If the worker has shut down, the job is dropped and the
            // receiver resolves with `Canceled`.
            .ok();
        result_rx
    }
4152
4153 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4154 let Some(git_store) = self.git_store.upgrade() else {
4155 return;
4156 };
4157 let entity = cx.entity();
4158 git_store.update(cx, |git_store, cx| {
4159 let Some((&id, _)) = git_store
4160 .repositories
4161 .iter()
4162 .find(|(_, handle)| *handle == &entity)
4163 else {
4164 return;
4165 };
4166 git_store.active_repo_id = Some(id);
4167 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4168 });
4169 }
4170
    /// File statuses captured by the most recent repository scan.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4174
    /// Cached diff statistics for `path`, if the snapshot has a status
    /// entry carrying them.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4178
    /// Returns a clone of the stash entries captured in the current snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4182
4183 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4184 let git_store = self.git_store.upgrade()?;
4185 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4186 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4187 let abs_path = SanitizedPath::new(&abs_path);
4188 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4189 Some(ProjectPath {
4190 worktree_id: worktree.read(cx).id(),
4191 path: relative_path,
4192 })
4193 }
4194
4195 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4196 let git_store = self.git_store.upgrade()?;
4197 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4198 let abs_path = worktree_store.absolutize(path, cx)?;
4199 self.snapshot.abs_path_to_repo_path(&abs_path)
4200 }
4201
4202 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4203 other
4204 .read(cx)
4205 .snapshot
4206 .work_directory_abs_path
4207 .starts_with(&self.snapshot.work_directory_abs_path)
4208 }
4209
4210 pub fn open_commit_buffer(
4211 &mut self,
4212 languages: Option<Arc<LanguageRegistry>>,
4213 buffer_store: Entity<BufferStore>,
4214 cx: &mut Context<Self>,
4215 ) -> Task<Result<Entity<Buffer>>> {
4216 let id = self.id;
4217 if let Some(buffer) = self.commit_message_buffer.clone() {
4218 return Task::ready(Ok(buffer));
4219 }
4220 let this = cx.weak_entity();
4221
4222 let rx = self.send_job(None, move |state, mut cx| async move {
4223 let Some(this) = this.upgrade() else {
4224 bail!("git store was dropped");
4225 };
4226 match state {
4227 RepositoryState::Local(..) => {
4228 this.update(&mut cx, |_, cx| {
4229 Self::open_local_commit_buffer(languages, buffer_store, cx)
4230 })
4231 .await
4232 }
4233 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4234 let request = client.request(proto::OpenCommitMessageBuffer {
4235 project_id: project_id.0,
4236 repository_id: id.to_proto(),
4237 });
4238 let response = request.await.context("requesting to open commit buffer")?;
4239 let buffer_id = BufferId::new(response.buffer_id)?;
4240 let buffer = buffer_store
4241 .update(&mut cx, |buffer_store, cx| {
4242 buffer_store.wait_for_remote_buffer(buffer_id, cx)
4243 })
4244 .await?;
4245 if let Some(language_registry) = languages {
4246 let git_commit_language =
4247 language_registry.language_for_name("Git Commit").await?;
4248 buffer.update(&mut cx, |buffer, cx| {
4249 buffer.set_language(Some(git_commit_language), cx);
4250 });
4251 }
4252 this.update(&mut cx, |this, _| {
4253 this.commit_message_buffer = Some(buffer.clone());
4254 });
4255 Ok(buffer)
4256 }
4257 }
4258 });
4259
4260 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
4261 }
4262
4263 fn open_local_commit_buffer(
4264 language_registry: Option<Arc<LanguageRegistry>>,
4265 buffer_store: Entity<BufferStore>,
4266 cx: &mut Context<Self>,
4267 ) -> Task<Result<Entity<Buffer>>> {
4268 cx.spawn(async move |repository, cx| {
4269 let git_commit_language = match language_registry {
4270 Some(language_registry) => {
4271 Some(language_registry.language_for_name("Git Commit").await?)
4272 }
4273 None => None,
4274 };
4275 let buffer = buffer_store
4276 .update(cx, |buffer_store, cx| {
4277 buffer_store.create_buffer(git_commit_language, false, cx)
4278 })
4279 .await?;
4280
4281 repository.update(cx, |repository, _| {
4282 repository.commit_message_buffer = Some(buffer.clone());
4283 })?;
4284 Ok(buffer)
4285 })
4286 }
4287
    /// Restores `paths` to their contents at `commit` (`git checkout <commit>
    /// -- <paths>`), tracking the affected paths as pending "Reverted"
    /// operations while the job runs.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                // Local: delegate to the git backend directly.
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote: forward the checkout over RPC.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                // First `?` propagates entity-released; the single `?` after
                // `.await` unwraps the oneshot `Canceled` layer, leaving the
                // job's own `Result<()>` as the closure's value.
                .await?
            },
        )
    }
4342
4343 pub fn reset(
4344 &mut self,
4345 commit: String,
4346 reset_mode: ResetMode,
4347 _cx: &mut App,
4348 ) -> oneshot::Receiver<Result<()>> {
4349 let id = self.id;
4350
4351 self.send_job(None, move |git_repo, _| async move {
4352 match git_repo {
4353 RepositoryState::Local(LocalRepositoryState {
4354 backend,
4355 environment,
4356 ..
4357 }) => backend.reset(commit, reset_mode, environment).await,
4358 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4359 client
4360 .request(proto::GitReset {
4361 project_id: project_id.0,
4362 repository_id: id.to_proto(),
4363 commit,
4364 mode: match reset_mode {
4365 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4366 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4367 },
4368 })
4369 .await?;
4370
4371 Ok(())
4372 }
4373 }
4374 })
4375 }
4376
4377 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4378 let id = self.id;
4379 self.send_job(None, move |git_repo, _cx| async move {
4380 match git_repo {
4381 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4382 backend.show(commit).await
4383 }
4384 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4385 let resp = client
4386 .request(proto::GitShow {
4387 project_id: project_id.0,
4388 repository_id: id.to_proto(),
4389 commit,
4390 })
4391 .await?;
4392
4393 Ok(CommitDetails {
4394 sha: resp.sha.into(),
4395 message: resp.message.into(),
4396 commit_timestamp: resp.commit_timestamp,
4397 author_email: resp.author_email.into(),
4398 author_name: resp.author_name.into(),
4399 })
4400 }
4401 }
4402 })
4403 }
4404
    /// Loads the full diff of `commit` (per-file old/new text), either from
    /// the local backend or via a `LoadCommitDiff` RPC for remote
    /// repositories.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        // Fallible collect: the first malformed path aborts the
                        // whole conversion with an error.
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4440
    /// Loads the complete commit history of `path` (no pagination: starts at
    /// the beginning with no limit).
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4447
    /// Loads a page of the commit history of `path`, skipping the first
    /// `skip` entries and returning at most `limit` entries (`None` = no
    /// limit). Remote repositories go through the `GitFileHistory` RPC.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        // The server echoes the path back; it may differ from
                        // the request (e.g. renames) — TODO confirm semantics.
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4489
    /// Returns the cached initial graph data for `(log_source, log_order)`,
    /// if a fetch was previously started via `graph_data`; never triggers a
    /// fetch itself.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4497
    /// Returns the slice of commit-graph data covering `range` for
    /// `(log_source, log_order)`, starting a background fetch on first access.
    ///
    /// While the fetch task is still running, `is_loading` is true and the
    /// slice covers whatever has streamed in so far.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        // First access for this (source, order) pair kicks off the fetch task;
        // subsequent calls reuse the cached entry.
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Surface any fetch error on the cached entry so the UI
                    // can render it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // NOTE(review): `range.start` is clamped to `len - 1`, so an entirely
        // out-of-range request still yields the final commit — presumably
        // intentional; confirm with callers.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4567
    /// Streams initial commit-graph data for a local repository from the
    /// backend into `initial_graph_data`, emitting a `GraphEvent` per batch so
    /// views can re-render incrementally.
    ///
    /// Returns `Err` with the backend's error message if the backend-side
    /// traversal fails.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        // The backend pushes batches of commit data through this channel while
        // the background task drives the traversal.
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches until the backend drops its sender, appending each
        // commit and indexing it by oid.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                // The entry is created by `graph_data` before this task is
                // spawned; a vacant entry indicates a bookkeeping bug.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Propagate any traversal error after the channel closes.
        task.await?;
        Ok(())
    }
4622
    /// Returns the cached state of `sha`'s commit data, requesting a load if
    /// it isn't cached yet.
    ///
    /// If the commit-data handler is open, the sha is queued and marked
    /// `Loading`; if it is closed, this call only (re)starts the handler — the
    /// request is presumably retried on a later call once the handler is open.
    /// While the handler is `Starting`, nothing is queued.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark as Loading if the request was actually queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        // Uncached shas (e.g. handler busy/starting) still read as Loading.
        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4643
    /// Spins up the commit-data handler: a background task that reads commit
    /// details from the backend on demand, plus a foreground task that writes
    /// results into `commit_data` and notifies the UI.
    ///
    /// The background loop shuts itself down after 10 seconds of inactivity
    /// (or when either channel closes), after which the foreground task marks
    /// the handler `Closed` so a later `fetch_commit_data` can reopen it.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // results: backend -> foreground (bounded to apply backpressure);
        // requests: callers -> backend (unbounded).
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                // The repository entity is gone; stop processing results.
                if result.is_err() {
                    break;
                }
            }

            // Result channel closed (idle timeout or error): mark the handler
            // closed so it can be reopened on demand.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Fresh idle timer per iteration: the loop exits after 10s
                // without a request.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Read failures are logged and skipped; the sha
                                // stays in the Loading state.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing result_tx lets the foreground task finish and flip the
            // handler state to Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4734
    /// Returns the project's `BufferStore`, or `None` if the git store has
    /// been dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4738
4739 fn save_buffers<'a>(
4740 &self,
4741 entries: impl IntoIterator<Item = &'a RepoPath>,
4742 cx: &mut Context<Self>,
4743 ) -> Vec<Task<anyhow::Result<()>>> {
4744 let mut save_futures = Vec::new();
4745 if let Some(buffer_store) = self.buffer_store(cx) {
4746 buffer_store.update(cx, |buffer_store, cx| {
4747 for path in entries {
4748 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4749 continue;
4750 };
4751 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4752 && buffer
4753 .read(cx)
4754 .file()
4755 .is_some_and(|file| file.disk_state().exists())
4756 && buffer.read(cx).has_unsaved_edits()
4757 {
4758 save_futures.push(buffer_store.save_buffer(buffer, cx));
4759 }
4760 }
4761 })
4762 }
4763 save_futures
4764 }
4765
    /// Stages the given `entries` (`git add`), saving dirty buffers first.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4773
    /// Unstages the given `entries` (`git reset`), saving dirty buffers first.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4781
    /// Stages (`stage == true`) or unstages the given `entries`, keeping the
    /// in-memory uncommitted diffs in sync with the index write.
    ///
    /// Steps: save any dirty buffers for the entries; optimistically apply the
    /// stage/unstage to each open buffer's uncommitted diff (recording a
    /// per-diff operation count); perform the actual index write (locally or
    /// over RPC); then either commit the recorded operation counts (success)
    /// or roll back the optimistic pending hunks (failure). The whole write is
    /// keyed with `GitJobKey::WriteIndex` and tracked as a pending op.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush unsaved edits before touching the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically stage/unstage all hunks of every
                            // open buffer's uncommitted diff, remembering each
                            // diff's operation count at that moment.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // The actual index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Commit or roll back the optimistic hunk changes.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
4966
4967 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4968 let to_stage = self
4969 .cached_status()
4970 .filter_map(|entry| {
4971 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4972 if ops.staging() || ops.staged() {
4973 None
4974 } else {
4975 Some(entry.repo_path)
4976 }
4977 } else if entry.status.staging().is_fully_staged() {
4978 None
4979 } else {
4980 Some(entry.repo_path)
4981 }
4982 })
4983 .collect();
4984 self.stage_or_unstage_entries(true, to_stage, cx)
4985 }
4986
4987 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4988 let to_unstage = self
4989 .cached_status()
4990 .filter_map(|entry| {
4991 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4992 if !ops.staging() && !ops.staged() {
4993 None
4994 } else {
4995 Some(entry.repo_path)
4996 }
4997 } else if entry.status.staging().is_fully_unstaged() {
4998 None
4999 } else {
5000 Some(entry.repo_path)
5001 }
5002 })
5003 .collect();
5004 self.stage_or_unstage_entries(false, to_unstage, cx)
5005 }
5006
    /// Stashes every entry currently reported by the cached status.
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5012
    /// Stashes changes to the given `entries`, locally via the backend or
    /// remotely through a `Stash` RPC.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // Double `?`: the outer unwraps the oneshot Canceled layer, the
            // inner the job's own Result.
            .await??;
            Ok(())
        })
    }
5049
    /// Pops the stash entry at `index` (or the latest entry when `None`),
    /// applying it and removing it from the stash.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5083
    /// Applies the stash entry at `index` (or the latest entry when `None`)
    /// without removing it from the stash.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5117
5118 pub fn stash_drop(
5119 &mut self,
5120 index: Option<usize>,
5121 cx: &mut Context<Self>,
5122 ) -> oneshot::Receiver<anyhow::Result<()>> {
5123 let id = self.id;
5124 let updates_tx = self
5125 .git_store()
5126 .and_then(|git_store| match &git_store.read(cx).state {
5127 GitStoreState::Local { downstream, .. } => downstream
5128 .as_ref()
5129 .map(|downstream| downstream.updates_tx.clone()),
5130 _ => None,
5131 });
5132 let this = cx.weak_entity();
5133 self.send_job(None, move |git_repo, mut cx| async move {
5134 match git_repo {
5135 RepositoryState::Local(LocalRepositoryState {
5136 backend,
5137 environment,
5138 ..
5139 }) => {
5140 // TODO would be nice to not have to do this manually
5141 let result = backend.stash_drop(index, environment).await;
5142 if result.is_ok()
5143 && let Ok(stash_entries) = backend.stash_entries().await
5144 {
5145 let snapshot = this.update(&mut cx, |this, cx| {
5146 this.snapshot.stash_entries = stash_entries;
5147 cx.emit(RepositoryEvent::StashEntriesChanged);
5148 this.snapshot.clone()
5149 })?;
5150 if let Some(updates_tx) = updates_tx {
5151 updates_tx
5152 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5153 .ok();
5154 }
5155 }
5156
5157 result
5158 }
5159 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5160 client
5161 .request(proto::StashDrop {
5162 project_id: project_id.0,
5163 repository_id: id.to_proto(),
5164 stash_index: index.map(|i| i as u64),
5165 })
5166 .await
5167 .context("sending stash pop request")?;
5168 Ok(())
5169 }
5170 }
5171 })
5172 }
5173
5174 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5175 let id = self.id;
5176 self.send_job(
5177 Some(format!("git hook {}", hook.as_str()).into()),
5178 move |git_repo, _cx| async move {
5179 match git_repo {
5180 RepositoryState::Local(LocalRepositoryState {
5181 backend,
5182 environment,
5183 ..
5184 }) => backend.run_hook(hook, environment.clone()).await,
5185 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5186 client
5187 .request(proto::RunGitHook {
5188 project_id: project_id.0,
5189 repository_id: id.to_proto(),
5190 hook: hook.to_proto(),
5191 })
5192 .await?;
5193
5194 Ok(())
5195 }
5196 }
5197 },
5198 )
5199 }
5200
    /// Creates a commit with `message` and the given `options`, running the
    /// pre-commit hook first and aborting if it fails.
    ///
    /// `name_and_email` overrides the author identity when provided. For
    /// remote repositories the `askpass` delegate is registered so credential
    /// prompts during the RPC round-trip are routed back to this client.
    ///
    /// NOTE(review): the pre-commit hook job is enqueued first and its result
    /// awaited *inside* the commit job — this assumes the job queue runs
    /// queued jobs concurrently enough not to self-deadlock; confirm queue
    /// semantics.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the pre-commit hook was cancelled or failed.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate when the request finishes, even
                    // on error.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5255
5256 pub fn fetch(
5257 &mut self,
5258 fetch_options: FetchOptions,
5259 askpass: AskPassDelegate,
5260 _cx: &mut App,
5261 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5262 let askpass_delegates = self.askpass_delegates.clone();
5263 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5264 let id = self.id;
5265
5266 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
5267 match git_repo {
5268 RepositoryState::Local(LocalRepositoryState {
5269 backend,
5270 environment,
5271 ..
5272 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
5273 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5274 askpass_delegates.lock().insert(askpass_id, askpass);
5275 let _defer = util::defer(|| {
5276 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5277 debug_assert!(askpass_delegate.is_some());
5278 });
5279
5280 let response = client
5281 .request(proto::Fetch {
5282 project_id: project_id.0,
5283 repository_id: id.to_proto(),
5284 askpass_id,
5285 remote: fetch_options.to_proto(),
5286 })
5287 .await?;
5288
5289 Ok(RemoteCommandOutput {
5290 stdout: response.stdout,
5291 stderr: response.stderr,
5292 })
5293 }
5294 }
5295 })
5296 }
5297
    /// Pushes `branch` to `remote_branch` on `remote`, with optional
    /// `--set-upstream` / `--force-with-lease` behavior.
    ///
    /// On a successful local push, the branch list is re-read so the snapshot
    /// reflects new upstream tracking state, and the updated repository is
    /// forwarded to any downstream clients. Remote repositories forward a
    /// `Push` RPC with the askpass delegate registered for credential prompts.
    pub fn push(
        &mut self,
        branch: SharedString,
        remote_branch: SharedString,
        remote: SharedString,
        options: Option<PushOptions>,
        askpass: AskPassDelegate,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Flag text used only for the progress/status message.
        let args = options
            .map(|option| match option {
                PushOptions::SetUpstream => " --set-upstream",
                PushOptions::Force => " --force-with-lease",
            })
            .unwrap_or("");

        // Channel for forwarding the refreshed snapshot downstream, if this is
        // the local host with an active downstream connection.
        let updates_tx = self
            .git_store()
            .and_then(|git_store| match &git_store.read(cx).state {
                GitStoreState::Local { downstream, .. } => downstream
                    .as_ref()
                    .map(|downstream| downstream.updates_tx.clone()),
                _ => None,
            });

        let this = cx.weak_entity();
        self.send_job(
            Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
            move |git_repo, mut cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => {
                        let result = backend
                            .push(
                                branch.to_string(),
                                remote_branch.to_string(),
                                remote.to_string(),
                                options,
                                askpass,
                                environment.clone(),
                                cx.clone(),
                            )
                            .await;
                        // TODO would be nice to not have to do this manually
                        if result.is_ok() {
                            // Refresh the head branch so upstream tracking
                            // state is current after the push.
                            let branches = backend.branches().await?;
                            let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after scan is {branch:?}");
                            let snapshot = this.update(&mut cx, |this, cx| {
                                this.snapshot.branch = branch;
                                cx.emit(RepositoryEvent::BranchChanged);
                                this.snapshot.clone()
                            })?;
                            if let Some(updates_tx) = updates_tx {
                                updates_tx
                                    .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                                    .ok();
                            }
                        }
                        result
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        // Deregister the delegate when the request finishes.
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Push {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_branch_name: remote_branch.to_string(),
                                remote_name: remote.to_string(),
                                options: options.map(|options| match options {
                                    PushOptions::Force => proto::push::PushOptions::Force,
                                    PushOptions::SetUpstream => {
                                        proto::push::PushOptions::SetUpstream
                                    }
                                }
                                    as i32),
                            })
                            .await?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }
5399
5400 pub fn pull(
5401 &mut self,
5402 branch: Option<SharedString>,
5403 remote: SharedString,
5404 rebase: bool,
5405 askpass: AskPassDelegate,
5406 _cx: &mut App,
5407 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5408 let askpass_delegates = self.askpass_delegates.clone();
5409 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5410 let id = self.id;
5411
5412 let mut status = "git pull".to_string();
5413 if rebase {
5414 status.push_str(" --rebase");
5415 }
5416 status.push_str(&format!(" {}", remote));
5417 if let Some(b) = &branch {
5418 status.push_str(&format!(" {}", b));
5419 }
5420
5421 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5422 match git_repo {
5423 RepositoryState::Local(LocalRepositoryState {
5424 backend,
5425 environment,
5426 ..
5427 }) => {
5428 backend
5429 .pull(
5430 branch.as_ref().map(|b| b.to_string()),
5431 remote.to_string(),
5432 rebase,
5433 askpass,
5434 environment.clone(),
5435 cx,
5436 )
5437 .await
5438 }
5439 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5440 askpass_delegates.lock().insert(askpass_id, askpass);
5441 let _defer = util::defer(|| {
5442 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5443 debug_assert!(askpass_delegate.is_some());
5444 });
5445 let response = client
5446 .request(proto::Pull {
5447 project_id: project_id.0,
5448 repository_id: id.to_proto(),
5449 askpass_id,
5450 rebase,
5451 branch_name: branch.as_ref().map(|b| b.to_string()),
5452 remote_name: remote.to_string(),
5453 })
5454 .await?;
5455
5456 Ok(RemoteCommandOutput {
5457 stdout: response.stdout,
5458 stderr: response.stderr,
5459 })
5460 }
5461 }
5462 })
5463 }
5464
    /// Writes `content` into the git index entry for `path` (staging the given
    /// text), serialized with other index writes for the same path via a
    /// `WriteIndex` job key.
    ///
    /// When `hunk_staging_operation_count` is provided, it is recorded on the
    /// buffer's diff state after the write completes, so later diff updates can
    /// be correlated with the hunk-staging operation that triggered them.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            // Key by path so concurrent writes to the same index entry are ordered.
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the on-disk executable bit; a missing file or
                        // metadata error falls back to non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                // Stamp the buffer's diff state with the operation count as of
                // this write, if the buffer is still open.
                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5543
5544 pub fn create_remote(
5545 &mut self,
5546 remote_name: String,
5547 remote_url: String,
5548 ) -> oneshot::Receiver<Result<()>> {
5549 let id = self.id;
5550 self.send_job(
5551 Some(format!("git remote add {remote_name} {remote_url}").into()),
5552 move |repo, _cx| async move {
5553 match repo {
5554 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5555 backend.create_remote(remote_name, remote_url).await
5556 }
5557 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5558 client
5559 .request(proto::GitCreateRemote {
5560 project_id: project_id.0,
5561 repository_id: id.to_proto(),
5562 remote_name,
5563 remote_url,
5564 })
5565 .await?;
5566
5567 Ok(())
5568 }
5569 }
5570 },
5571 )
5572 }
5573
5574 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5575 let id = self.id;
5576 self.send_job(
5577 Some(format!("git remove remote {remote_name}").into()),
5578 move |repo, _cx| async move {
5579 match repo {
5580 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5581 backend.remove_remote(remote_name).await
5582 }
5583 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5584 client
5585 .request(proto::GitRemoveRemote {
5586 project_id: project_id.0,
5587 repository_id: id.to_proto(),
5588 remote_name,
5589 })
5590 .await?;
5591
5592 Ok(())
5593 }
5594 }
5595 },
5596 )
5597 }
5598
5599 pub fn get_remotes(
5600 &mut self,
5601 branch_name: Option<String>,
5602 is_push: bool,
5603 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5604 let id = self.id;
5605 self.send_job(None, move |repo, _cx| async move {
5606 match repo {
5607 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5608 let remote = if let Some(branch_name) = branch_name {
5609 if is_push {
5610 backend.get_push_remote(branch_name).await?
5611 } else {
5612 backend.get_branch_remote(branch_name).await?
5613 }
5614 } else {
5615 None
5616 };
5617
5618 match remote {
5619 Some(remote) => Ok(vec![remote]),
5620 None => backend.get_all_remotes().await,
5621 }
5622 }
5623 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5624 let response = client
5625 .request(proto::GetRemotes {
5626 project_id: project_id.0,
5627 repository_id: id.to_proto(),
5628 branch_name,
5629 is_push,
5630 })
5631 .await?;
5632
5633 let remotes = response
5634 .remotes
5635 .into_iter()
5636 .map(|remotes| Remote {
5637 name: remotes.name.into(),
5638 })
5639 .collect();
5640
5641 Ok(remotes)
5642 }
5643 }
5644 })
5645 }
5646
5647 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5648 let id = self.id;
5649 self.send_job(None, move |repo, _| async move {
5650 match repo {
5651 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5652 backend.branches().await
5653 }
5654 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5655 let response = client
5656 .request(proto::GitGetBranches {
5657 project_id: project_id.0,
5658 repository_id: id.to_proto(),
5659 })
5660 .await?;
5661
5662 let branches = response
5663 .branches
5664 .into_iter()
5665 .map(|branch| proto_to_branch(&branch))
5666 .collect();
5667
5668 Ok(branches)
5669 }
5670 }
5671 })
5672 }
5673
5674 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5675 let id = self.id;
5676 self.send_job(None, move |repo, _| async move {
5677 match repo {
5678 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5679 backend.worktrees().await
5680 }
5681 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5682 let response = client
5683 .request(proto::GitGetWorktrees {
5684 project_id: project_id.0,
5685 repository_id: id.to_proto(),
5686 })
5687 .await?;
5688
5689 let worktrees = response
5690 .worktrees
5691 .into_iter()
5692 .map(|worktree| proto_to_worktree(&worktree))
5693 .collect();
5694
5695 Ok(worktrees)
5696 }
5697 }
5698 })
5699 }
5700
5701 pub fn create_worktree(
5702 &mut self,
5703 name: String,
5704 directory: PathBuf,
5705 commit: Option<String>,
5706 ) -> oneshot::Receiver<Result<()>> {
5707 let id = self.id;
5708 self.send_job(
5709 Some("git worktree add".into()),
5710 move |repo, _cx| async move {
5711 match repo {
5712 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5713 backend.create_worktree(name, directory, commit).await
5714 }
5715 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5716 client
5717 .request(proto::GitCreateWorktree {
5718 project_id: project_id.0,
5719 repository_id: id.to_proto(),
5720 name,
5721 directory: directory.to_string_lossy().to_string(),
5722 commit,
5723 })
5724 .await?;
5725
5726 Ok(())
5727 }
5728 }
5729 },
5730 )
5731 }
5732
5733 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5734 self.send_job(
5735 Some(format!("git worktree remove: {}", path.display()).into()),
5736 move |repo, _cx| async move {
5737 match repo {
5738 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5739 backend.remove_worktree(path, force).await
5740 }
5741 RepositoryState::Remote(_) => {
5742 anyhow::bail!(
5743 "Removing worktrees on remote repositories is not yet supported"
5744 )
5745 }
5746 }
5747 },
5748 )
5749 }
5750
5751 pub fn default_branch(
5752 &mut self,
5753 include_remote_name: bool,
5754 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5755 let id = self.id;
5756 self.send_job(None, move |repo, _| async move {
5757 match repo {
5758 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5759 backend.default_branch(include_remote_name).await
5760 }
5761 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5762 let response = client
5763 .request(proto::GetDefaultBranch {
5764 project_id: project_id.0,
5765 repository_id: id.to_proto(),
5766 })
5767 .await?;
5768
5769 anyhow::Ok(response.branch.map(SharedString::from))
5770 }
5771 }
5772 })
5773 }
5774
    /// Computes a tree-level diff (per-path added/modified/deleted statuses)
    /// for the given `diff_type`.
    ///
    /// Remotely, proto entries are converted back into `TreeDiffStatus`
    /// values; entries with a missing/unparseable oid or path are logged and
    /// dropped rather than failing the whole diff.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Map proto statuses back to TreeDiffStatus; Modified and
                    // Deleted carry the pre-change blob oid.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
5834
5835 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5836 let id = self.id;
5837 self.send_job(None, move |repo, _cx| async move {
5838 match repo {
5839 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5840 backend.diff(diff_type).await
5841 }
5842 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5843 let (proto_diff_type, merge_base_ref) = match &diff_type {
5844 DiffType::HeadToIndex => {
5845 (proto::git_diff::DiffType::HeadToIndex.into(), None)
5846 }
5847 DiffType::HeadToWorktree => {
5848 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
5849 }
5850 DiffType::MergeBase { base_ref } => (
5851 proto::git_diff::DiffType::MergeBase.into(),
5852 Some(base_ref.to_string()),
5853 ),
5854 };
5855 let response = client
5856 .request(proto::GitDiff {
5857 project_id: project_id.0,
5858 repository_id: id.to_proto(),
5859 diff_type: proto_diff_type,
5860 merge_base_ref,
5861 })
5862 .await?;
5863
5864 Ok(response.diff)
5865 }
5866 }
5867 })
5868 }
5869
5870 pub fn create_branch(
5871 &mut self,
5872 branch_name: String,
5873 base_branch: Option<String>,
5874 ) -> oneshot::Receiver<Result<()>> {
5875 let id = self.id;
5876 let status_msg = if let Some(ref base) = base_branch {
5877 format!("git switch -c {branch_name} {base}").into()
5878 } else {
5879 format!("git switch -c {branch_name}").into()
5880 };
5881 self.send_job(Some(status_msg), move |repo, _cx| async move {
5882 match repo {
5883 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5884 backend.create_branch(branch_name, base_branch).await
5885 }
5886 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5887 client
5888 .request(proto::GitCreateBranch {
5889 project_id: project_id.0,
5890 repository_id: id.to_proto(),
5891 branch_name,
5892 })
5893 .await?;
5894
5895 Ok(())
5896 }
5897 }
5898 })
5899 }
5900
5901 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5902 let id = self.id;
5903 self.send_job(
5904 Some(format!("git switch {branch_name}").into()),
5905 move |repo, _cx| async move {
5906 match repo {
5907 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5908 backend.change_branch(branch_name).await
5909 }
5910 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5911 client
5912 .request(proto::GitChangeBranch {
5913 project_id: project_id.0,
5914 repository_id: id.to_proto(),
5915 branch_name,
5916 })
5917 .await?;
5918
5919 Ok(())
5920 }
5921 }
5922 },
5923 )
5924 }
5925
5926 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5927 let id = self.id;
5928 self.send_job(
5929 Some(format!("git branch -d {branch_name}").into()),
5930 move |repo, _cx| async move {
5931 match repo {
5932 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5933 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5934 client
5935 .request(proto::GitDeleteBranch {
5936 project_id: project_id.0,
5937 repository_id: id.to_proto(),
5938 branch_name,
5939 })
5940 .await?;
5941
5942 Ok(())
5943 }
5944 }
5945 },
5946 )
5947 }
5948
5949 pub fn rename_branch(
5950 &mut self,
5951 branch: String,
5952 new_name: String,
5953 ) -> oneshot::Receiver<Result<()>> {
5954 let id = self.id;
5955 self.send_job(
5956 Some(format!("git branch -m {branch} {new_name}").into()),
5957 move |repo, _cx| async move {
5958 match repo {
5959 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5960 backend.rename_branch(branch, new_name).await
5961 }
5962 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5963 client
5964 .request(proto::GitRenameBranch {
5965 project_id: project_id.0,
5966 repository_id: id.to_proto(),
5967 branch,
5968 new_name,
5969 })
5970 .await?;
5971
5972 Ok(())
5973 }
5974 }
5975 },
5976 )
5977 }
5978
5979 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5980 let id = self.id;
5981 self.send_job(None, move |repo, _cx| async move {
5982 match repo {
5983 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5984 backend.check_for_pushed_commit().await
5985 }
5986 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5987 let response = client
5988 .request(proto::CheckForPushedCommits {
5989 project_id: project_id.0,
5990 repository_id: id.to_proto(),
5991 })
5992 .await?;
5993
5994 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5995
5996 Ok(branches)
5997 }
5998 }
5999 })
6000 }
6001
6002 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6003 self.send_job(None, |repo, _cx| async move {
6004 match repo {
6005 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6006 backend.checkpoint().await
6007 }
6008 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6009 }
6010 })
6011 }
6012
6013 pub fn restore_checkpoint(
6014 &mut self,
6015 checkpoint: GitRepositoryCheckpoint,
6016 ) -> oneshot::Receiver<Result<()>> {
6017 self.send_job(None, move |repo, _cx| async move {
6018 match repo {
6019 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6020 backend.restore_checkpoint(checkpoint).await
6021 }
6022 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6023 }
6024 })
6025 }
6026
    /// Applies an `UpdateRepository` message from the upstream (host) project
    /// to this downstream repository's snapshot, emitting change events only
    /// for the parts that actually differ.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch / head commit: emit BranchChanged only when either differs.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries: malformed proto entries are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Merge removals and upserts into one edit batch for the status tree;
        // unparseable paths/statuses are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only adopt the scan id once the final message of the batch arrives.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6101
6102 pub fn compare_checkpoints(
6103 &mut self,
6104 left: GitRepositoryCheckpoint,
6105 right: GitRepositoryCheckpoint,
6106 ) -> oneshot::Receiver<Result<bool>> {
6107 self.send_job(None, move |repo, _cx| async move {
6108 match repo {
6109 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6110 backend.compare_checkpoints(left, right).await
6111 }
6112 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6113 }
6114 })
6115 }
6116
6117 pub fn diff_checkpoints(
6118 &mut self,
6119 base_checkpoint: GitRepositoryCheckpoint,
6120 target_checkpoint: GitRepositoryCheckpoint,
6121 ) -> oneshot::Receiver<Result<String>> {
6122 self.send_job(None, move |repo, _cx| async move {
6123 match repo {
6124 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6125 backend
6126 .diff_checkpoints(base_checkpoint, target_checkpoint)
6127 .await
6128 }
6129 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6130 }
6131 })
6132 }
6133
    /// Prunes finished pending ops, retaining only those still running, and
    /// emits `PendingOpsChanged` when the set actually changed.
    ///
    /// NOTE(review): the emitted event carries the *previous* `pending_ops`
    /// (cloned before reassignment) rather than `updated` — confirm whether
    /// listeners expect the old or the new set here.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                // Keep only the ops that are still in flight for each path.
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6159
    /// Schedules a full git status scan as a keyed job (`ReloadGitState`), so
    /// redundant queued scans collapse into one.
    ///
    /// On completion the freshly computed snapshot replaces the current one,
    /// pending ops are pruned, accumulated events are emitted, and the snapshot
    /// is forwarded downstream when `updates_tx` is present.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                // A full scan supersedes any queued per-path refreshes.
                let compute_snapshot = this.update(&mut cx, |this, _| {
                    this.paths_needing_status_update.clear();
                    compute_snapshot(
                        this.id,
                        this.work_directory_abs_path.clone(),
                        this.snapshot.clone(),
                        backend.clone(),
                    )
                });
                // Run the expensive snapshot computation off the main thread.
                let (snapshot, events) = cx.background_spawn(compute_snapshot).await?;
                this.update(&mut cx, |this, cx| {
                    this.snapshot = snapshot.clone();
                    this.clear_pending_ops(cx);
                    for event in events {
                        cx.emit(event);
                    }
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6204
    /// Spawns the worker loop that executes queued `GitJob`s against a local
    /// repository, returning the sender used to enqueue jobs.
    ///
    /// Jobs run strictly one at a time. Keyed jobs coalesce: when a popped job
    /// shares its key with a job still in the queue, the older one is skipped
    /// so only the most recent keyed job executes.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for repository initialization before running any jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so key-coalescing below can
                // see all pending jobs.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job if a newer job with the same key is queued.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // Channel closed: no more jobs will ever arrive.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6250
    /// Spawns the worker loop that executes queued `GitJob`s against a remote
    /// (RPC-proxied) repository, returning the sender used to enqueue jobs.
    ///
    /// Same scheduling behavior as `spawn_local_git_worker` (serial execution,
    /// keyed jobs coalesce to the newest), minus local-only initialization.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain the channel so key-coalescing sees all pending jobs.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job if a newer job with the same key is queued.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // Channel closed: no more jobs will ever arrive.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6286
6287 fn load_staged_text(
6288 &mut self,
6289 buffer_id: BufferId,
6290 repo_path: RepoPath,
6291 cx: &App,
6292 ) -> Task<Result<Option<String>>> {
6293 let rx = self.send_job(None, move |state, _| async move {
6294 match state {
6295 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6296 anyhow::Ok(backend.load_index_text(repo_path).await)
6297 }
6298 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6299 let response = client
6300 .request(proto::OpenUnstagedDiff {
6301 project_id: project_id.to_proto(),
6302 buffer_id: buffer_id.to_proto(),
6303 })
6304 .await?;
6305 Ok(response.staged_text)
6306 }
6307 }
6308 });
6309 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6310 }
6311
    /// Loads the committed (HEAD) and staged (index) texts for `repo_path`,
    /// collapsed into a `DiffBasesChange`.
    ///
    /// When both texts are identical, `SetBoth` is returned so callers can
    /// share a single base; otherwise each base is carried separately.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // Index matching HEAD means one shared diff base suffices.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The host signals via `mode` whether index == HEAD.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6357
6358 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6359 let repository_id = self.snapshot.id;
6360 let rx = self.send_job(None, move |state, _| async move {
6361 match state {
6362 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6363 backend.load_blob_content(oid).await
6364 }
6365 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6366 let response = client
6367 .request(proto::GetBlobContent {
6368 project_id: project_id.to_proto(),
6369 repository_id: repository_id.0,
6370 oid: oid.to_string(),
6371 })
6372 .await?;
6373 Ok(response.content)
6374 }
6375 }
6376 });
6377 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6378 }
6379
    /// Records that `paths` changed on disk and schedules an incremental git
    /// status refresh for them, coalesced under the `RefreshStatuses` job key.
    ///
    /// The refresh recomputes status and diff-stat only for the accumulated
    /// paths, compares against the previous snapshot's entries, and applies
    /// the minimal set of inserts/removals — emitting events and forwarding
    /// the snapshot downstream only when something actually changed.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of all batches accumulated since the last refresh.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten and dedupe the batched paths.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        // Fetch status and diff-stat concurrently; without a head
                        // commit there is nothing to diff against, so substitute
                        // an empty result.
                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Upsert entries whose status or diff stat differs from
                        // the previous snapshot; skip unchanged ones.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths git reported no status for: remove any stale
                        // entries the previous snapshot still holds.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6492
6493 /// currently running git command and when it started
6494 pub fn current_job(&self) -> Option<JobInfo> {
6495 self.active_jobs.values().next().cloned()
6496 }
6497
6498 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
6499 self.send_job(None, |_, _| async {})
6500 }
6501
6502 fn spawn_job_with_tracking<AsyncFn>(
6503 &mut self,
6504 paths: Vec<RepoPath>,
6505 git_status: pending_op::GitStatus,
6506 cx: &mut Context<Self>,
6507 f: AsyncFn,
6508 ) -> Task<Result<()>>
6509 where
6510 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
6511 {
6512 let ids = self.new_pending_ops_for_paths(paths, git_status);
6513
6514 cx.spawn(async move |this, cx| {
6515 let (job_status, result) = match f(this.clone(), cx).await {
6516 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
6517 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
6518 Err(err) => (pending_op::JobStatus::Error, Err(err)),
6519 };
6520
6521 this.update(cx, |this, _| {
6522 let mut edits = Vec::with_capacity(ids.len());
6523 for (id, entry) in ids {
6524 if let Some(mut ops) = this
6525 .pending_ops
6526 .get(&PathKey(entry.as_ref().clone()), ())
6527 .cloned()
6528 {
6529 if let Some(op) = ops.op_by_id_mut(id) {
6530 op.job_status = job_status;
6531 }
6532 edits.push(sum_tree::Edit::Insert(ops));
6533 }
6534 }
6535 this.pending_ops.edit(edits, ());
6536 })?;
6537
6538 result
6539 })
6540 }
6541
6542 fn new_pending_ops_for_paths(
6543 &mut self,
6544 paths: Vec<RepoPath>,
6545 git_status: pending_op::GitStatus,
6546 ) -> Vec<(PendingOpId, RepoPath)> {
6547 let mut edits = Vec::with_capacity(paths.len());
6548 let mut ids = Vec::with_capacity(paths.len());
6549 for path in paths {
6550 let mut ops = self
6551 .pending_ops
6552 .get(&PathKey(path.as_ref().clone()), ())
6553 .cloned()
6554 .unwrap_or_else(|| PendingOps::new(&path));
6555 let id = ops.max_id() + 1;
6556 ops.ops.push(PendingOp {
6557 id,
6558 git_status,
6559 job_status: pending_op::JobStatus::Running,
6560 });
6561 edits.push(sum_tree::Edit::Insert(ops));
6562 ids.push((id, path));
6563 }
6564 self.pending_ops.edit(edits, ());
6565 ids
6566 }
6567 pub fn default_remote_url(&self) -> Option<String> {
6568 self.remote_upstream_url
6569 .clone()
6570 .or(self.remote_origin_url.clone())
6571 }
6572}
6573
6574fn get_permalink_in_rust_registry_src(
6575 provider_registry: Arc<GitHostingProviderRegistry>,
6576 path: PathBuf,
6577 selection: Range<u32>,
6578) -> Result<url::Url> {
6579 #[derive(Deserialize)]
6580 struct CargoVcsGit {
6581 sha1: String,
6582 }
6583
6584 #[derive(Deserialize)]
6585 struct CargoVcsInfo {
6586 git: CargoVcsGit,
6587 path_in_vcs: String,
6588 }
6589
6590 #[derive(Deserialize)]
6591 struct CargoPackage {
6592 repository: String,
6593 }
6594
6595 #[derive(Deserialize)]
6596 struct CargoToml {
6597 package: CargoPackage,
6598 }
6599
6600 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
6601 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
6602 Some((dir, json))
6603 }) else {
6604 bail!("No .cargo_vcs_info.json found in parent directories")
6605 };
6606 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
6607 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
6608 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
6609 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
6610 .context("parsing package.repository field of manifest")?;
6611 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
6612 let permalink = provider.build_permalink(
6613 remote,
6614 BuildPermalinkParams::new(
6615 &cargo_vcs_info.git.sha1,
6616 &RepoPath::from_rel_path(
6617 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6618 ),
6619 Some(selection),
6620 ),
6621 );
6622 Ok(permalink)
6623}
6624
6625fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6626 let Some(blame) = blame else {
6627 return proto::BlameBufferResponse {
6628 blame_response: None,
6629 };
6630 };
6631
6632 let entries = blame
6633 .entries
6634 .into_iter()
6635 .map(|entry| proto::BlameEntry {
6636 sha: entry.sha.as_bytes().into(),
6637 start_line: entry.range.start,
6638 end_line: entry.range.end,
6639 original_line_number: entry.original_line_number,
6640 author: entry.author,
6641 author_mail: entry.author_mail,
6642 author_time: entry.author_time,
6643 author_tz: entry.author_tz,
6644 committer: entry.committer_name,
6645 committer_mail: entry.committer_email,
6646 committer_time: entry.committer_time,
6647 committer_tz: entry.committer_tz,
6648 summary: entry.summary,
6649 previous: entry.previous,
6650 filename: entry.filename,
6651 })
6652 .collect::<Vec<_>>();
6653
6654 let messages = blame
6655 .messages
6656 .into_iter()
6657 .map(|(oid, message)| proto::CommitMessage {
6658 oid: oid.as_bytes().into(),
6659 message,
6660 })
6661 .collect::<Vec<_>>();
6662
6663 proto::BlameBufferResponse {
6664 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6665 }
6666}
6667
6668fn deserialize_blame_buffer_response(
6669 response: proto::BlameBufferResponse,
6670) -> Option<git::blame::Blame> {
6671 let response = response.blame_response?;
6672 let entries = response
6673 .entries
6674 .into_iter()
6675 .filter_map(|entry| {
6676 Some(git::blame::BlameEntry {
6677 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6678 range: entry.start_line..entry.end_line,
6679 original_line_number: entry.original_line_number,
6680 committer_name: entry.committer,
6681 committer_time: entry.committer_time,
6682 committer_tz: entry.committer_tz,
6683 committer_email: entry.committer_mail,
6684 author: entry.author,
6685 author_mail: entry.author_mail,
6686 author_time: entry.author_time,
6687 author_tz: entry.author_tz,
6688 summary: entry.summary,
6689 previous: entry.previous,
6690 filename: entry.filename,
6691 })
6692 })
6693 .collect::<Vec<_>>();
6694
6695 let messages = response
6696 .messages
6697 .into_iter()
6698 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6699 .collect::<HashMap<_, _>>();
6700
6701 Some(Blame { entries, messages })
6702}
6703
6704fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6705 proto::Branch {
6706 is_head: branch.is_head,
6707 ref_name: branch.ref_name.to_string(),
6708 unix_timestamp: branch
6709 .most_recent_commit
6710 .as_ref()
6711 .map(|commit| commit.commit_timestamp as u64),
6712 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6713 ref_name: upstream.ref_name.to_string(),
6714 tracking: upstream
6715 .tracking
6716 .status()
6717 .map(|upstream| proto::UpstreamTracking {
6718 ahead: upstream.ahead as u64,
6719 behind: upstream.behind as u64,
6720 }),
6721 }),
6722 most_recent_commit: branch
6723 .most_recent_commit
6724 .as_ref()
6725 .map(|commit| proto::CommitSummary {
6726 sha: commit.sha.to_string(),
6727 subject: commit.subject.to_string(),
6728 commit_timestamp: commit.commit_timestamp,
6729 author_name: commit.author_name.to_string(),
6730 }),
6731 }
6732}
6733
6734fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6735 proto::Worktree {
6736 path: worktree.path.to_string_lossy().to_string(),
6737 ref_name: worktree.ref_name.to_string(),
6738 sha: worktree.sha.to_string(),
6739 }
6740}
6741
6742fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6743 git::repository::Worktree {
6744 path: PathBuf::from(proto.path.clone()),
6745 ref_name: proto.ref_name.clone().into(),
6746 sha: proto.sha.clone().into(),
6747 }
6748}
6749
6750fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6751 git::repository::Branch {
6752 is_head: proto.is_head,
6753 ref_name: proto.ref_name.clone().into(),
6754 upstream: proto
6755 .upstream
6756 .as_ref()
6757 .map(|upstream| git::repository::Upstream {
6758 ref_name: upstream.ref_name.to_string().into(),
6759 tracking: upstream
6760 .tracking
6761 .as_ref()
6762 .map(|tracking| {
6763 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6764 ahead: tracking.ahead as u32,
6765 behind: tracking.behind as u32,
6766 })
6767 })
6768 .unwrap_or(git::repository::UpstreamTracking::Gone),
6769 }),
6770 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6771 git::repository::CommitSummary {
6772 sha: commit.sha.to_string().into(),
6773 subject: commit.subject.to_string().into(),
6774 commit_timestamp: commit.commit_timestamp,
6775 author_name: commit.author_name.to_string().into(),
6776 has_parent: true,
6777 }
6778 }),
6779 }
6780}
6781
6782fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6783 proto::GitCommitDetails {
6784 sha: commit.sha.to_string(),
6785 message: commit.message.to_string(),
6786 commit_timestamp: commit.commit_timestamp,
6787 author_email: commit.author_email.to_string(),
6788 author_name: commit.author_name.to_string(),
6789 }
6790}
6791
6792fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6793 CommitDetails {
6794 sha: proto.sha.clone().into(),
6795 message: proto.message.clone().into(),
6796 commit_timestamp: proto.commit_timestamp,
6797 author_email: proto.author_email.clone().into(),
6798 author_name: proto.author_name.clone().into(),
6799 }
6800}
6801
/// Recomputes a full [`RepositorySnapshot`] for the repository rooted at
/// `work_directory_abs_path` by querying the git `backend`, returning the new
/// snapshot alongside the [`RepositoryEvent`]s describing what changed
/// relative to `prev_snapshot`.
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    // The checked-out branch is the one marked as HEAD, if any.
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Useful when branch is None in detached head state
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    // Diff stats require a HEAD to diff against; when there is none (e.g. an
    // empty repository), substitute an empty result without calling the backend.
    let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> = if head_commit.is_some() {
        backend.diff_stat(&[])
    } else {
        future::ready(Ok(status::GitDiffStat {
            entries: Arc::default(),
        }))
        .boxed()
    };
    // Fetch statuses for the entire work directory and diff stats concurrently.
    let (statuses, diff_stats) = futures::future::try_join(
        backend.status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )]),
        diff_stat_future,
    )
    .await?;

    // Index diff stats by path for O(1) lookup while building the status tree.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let stash_entries = backend.stash_entries().await?;
    let mut conflicted_paths = Vec::new();
    // Build the ordered status tree, collecting conflicted paths as a side
    // effect of the same pass.
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );
    let mut merge_details = prev_snapshot.merge;
    let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
    log::debug!("new merge details: {merge_details:?}");

    if conflicts_changed || statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged)
    }

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    let remote_origin_url = backend.remote_url("origin").await;
    let remote_upstream_url = backend.remote_url("upstream").await;

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        original_repo_abs_path: prev_snapshot.original_repo_abs_path,
        path_style: prev_snapshot.path_style,
        // Every recomputation bumps the scan id so observers can detect staleness.
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}
6883
/// Decodes a [`FileStatus`] from its protobuf representation.
///
/// `simple_status` is a legacy single-code encoding; it is only consulted
/// when the richer `status` variant payload is absent. Returns an error when
/// a numeric code is not valid for the context it appears in.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Legacy path: no structured variant, so interpret the simple code.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both heads with the same mapping; `?` below surfaces the
            // first invalid code.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Index and worktree statuses share one decoding table.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
6969
6970fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6971 use proto::git_file_status::{Tracked, Unmerged, Variant};
6972
6973 let variant = match status {
6974 FileStatus::Untracked => Variant::Untracked(Default::default()),
6975 FileStatus::Ignored => Variant::Ignored(Default::default()),
6976 FileStatus::Unmerged(UnmergedStatus {
6977 first_head,
6978 second_head,
6979 }) => Variant::Unmerged(Unmerged {
6980 first_head: unmerged_status_to_proto(first_head),
6981 second_head: unmerged_status_to_proto(second_head),
6982 }),
6983 FileStatus::Tracked(TrackedStatus {
6984 index_status,
6985 worktree_status,
6986 }) => Variant::Tracked(Tracked {
6987 index_status: tracked_status_to_proto(index_status),
6988 worktree_status: tracked_status_to_proto(worktree_status),
6989 }),
6990 };
6991 proto::GitFileStatus {
6992 variant: Some(variant),
6993 }
6994}
6995
6996fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6997 match code {
6998 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6999 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7000 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7001 }
7002}
7003
7004fn tracked_status_to_proto(code: StatusCode) -> i32 {
7005 match code {
7006 StatusCode::Added => proto::GitStatus::Added as _,
7007 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7008 StatusCode::Modified => proto::GitStatus::Modified as _,
7009 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7010 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7011 StatusCode::Copied => proto::GitStatus::Copied as _,
7012 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7013 }
7014}