1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
34 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
35 UpstreamTrackingStatus, Worktree as GitWorktree,
36 },
37 stash::{GitStash, StashEntry},
38 status::{
39 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
40 UnmergedStatus, UnmergedStatusCode,
41 },
42};
43use gpui::{
44 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
45 WeakEntity,
46};
47use language::{
48 Buffer, BufferEvent, Language, LanguageRegistry,
49 proto::{deserialize_version, serialize_version},
50};
51use parking_lot::Mutex;
52use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
53use postage::stream::Stream as _;
54use rpc::{
55 AnyProtoClient, TypedEnvelope,
56 proto::{self, git_reset, split_repository_update},
57};
58use serde::Deserialize;
59use settings::WorktreeId;
60use smol::future::yield_now;
61use std::{
62 cmp::Ordering,
63 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
64 future::Future,
65 mem,
66 ops::Range,
67 path::{Path, PathBuf},
68 str::FromStr,
69 sync::{
70 Arc,
71 atomic::{self, AtomicU64},
72 },
73 time::Instant,
74};
75use sum_tree::{Edit, SumTree, TreeMap};
76use task::Shell;
77use text::{Bias, BufferId};
78use util::{
79 ResultExt, debug_panic,
80 paths::{PathStyle, SanitizedPath},
81 post_inc,
82 rel_path::RelPath,
83};
84use worktree::{
85 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
86 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
87};
88use zeroize::Zeroize;
89
/// Central store for all git state in a project: the repositories discovered
/// in its worktrees, per-buffer diff/conflict state, and replication of that
/// state to and from collaborators.
pub struct GitStore {
    // Local (owns repositories) vs. remote (mirrors an upstream project).
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    // All known repositories, keyed by their stable id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Which worktrees contribute entries to each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    // Repository currently treated as "active", if any.
    active_repo_id: Option<RepositoryId>,
    // In-flight diff loads, deduplicated per (buffer, diff kind) so
    // concurrent callers share one task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (diffs, conflict sets, base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diffs that have been shared with each downstream peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
104
/// Diffs for a single buffer that have been shared with a downstream peer;
/// holding the entities keeps them alive while the peer needs them.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
110
/// Git-related state tracked for a single open buffer: its open diffs,
/// conflict-marker parse state, and the base texts those diffs compare
/// against.
struct BufferGitState {
    // Weak handles: the diffs are owned by their consumers; this state only
    // observes them while they are alive.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against a specific commit's blob, keyed by OID (`None` is the
    // "no base commit" case).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    // Holding these tasks keeps the corresponding background work alive.
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Completed/dropped to notify waiters when a conflict reparse finishes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    // Cached base texts; `None` means the file is absent in that base.
    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
141
/// Describes which diff base texts (index and/or HEAD) changed and their new
/// contents. `None` means the file is absent in that base.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD both changed, to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD both changed, to the same value.
    SetBoth(Option<String>),
}
152
/// Which base a buffer diff is computed against; used as part of the key for
/// deduplicating in-flight diff loads.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Working copy vs. the index.
    Unstaged,
    /// Working copy vs. HEAD.
    Uncommitted,
    /// Working copy vs. the blob at a specific commit (`None` = no base).
    SinceOid(Option<git::Oid>),
}
159
/// Where this store's authoritative git state lives.
enum GitStoreState {
    /// This instance owns the repositories and reads them from disk.
    Local {
        next_repository_id: Arc<AtomicU64>,
        // Present while the project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    /// This instance mirrors state replicated from an upstream project.
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        // Present while this replica is itself shared further downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
173
/// Message sent to the background task that forwards repository changes to
/// downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
178
/// Sharing state held by a local store while collaborators are connected:
/// the client to send to, plus the channel and task that stream repository
/// updates to it.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    // Background task that drains `updates_tx` and sends proto updates.
    _task: Task<Result<()>>,
}
185
/// A point-in-time capture of every repository in the store, keyed by the
/// repository's working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
190
/// The git status of a single path within a repository, as stored in the
/// repository snapshot's status tree.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
}
196
197impl StatusEntry {
198 fn to_proto(&self) -> proto::StatusEntry {
199 let simple_status = match self.status {
200 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
201 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
202 FileStatus::Tracked(TrackedStatus {
203 index_status,
204 worktree_status,
205 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
206 worktree_status
207 } else {
208 index_status
209 }),
210 };
211
212 proto::StatusEntry {
213 repo_path: self.repo_path.to_proto(),
214 simple_status,
215 status: Some(status_to_proto(self.status)),
216 }
217 }
218}
219
220impl TryFrom<proto::StatusEntry> for StatusEntry {
221 type Error = anyhow::Error;
222
223 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
224 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
225 let status = status_from_proto(value.simple_status, value.status)?;
226 Ok(Self { repo_path, status })
227 }
228}
229
// Allows status entries to live in a `SumTree` ordered by path, with a
// `GitSummary` aggregated over subtrees for fast status rollups.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}
240
// Status entries are keyed by their repo-relative path, enabling
// lookup/edit by path in the status tree.
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
248
/// Stable identifier for a repository within a `GitStore`; also used in the
/// wire protocol (see `to_proto`/`from_proto` usage elsewhere in this file).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
251
/// State of an in-progress merge: which merge heads conflict on which paths,
/// and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}
257
/// Per-commit detail used by the commit graph: either still being fetched,
/// or fully loaded.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
263
/// Immutable view of a repository's state at a given scan, cheap to clone and
/// safe to ship across threads or to collaborators.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Path-ordered tree of per-file git statuses.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    /// Currently checked-out branch, if HEAD is on one.
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    /// Incremented on each status scan; used to order snapshot updates.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
}
283
/// Identifier for a git job queued on a repository.
type JobId = u64;
285
/// Metadata about a running git job, e.g. for progress display.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    /// When the job started.
    pub start: Instant,
    /// Human-readable description of what the job is doing.
    pub message: SharedString,
}
291
/// Running handler that resolves commit-data requests for the commit graph;
/// requests are sent as OIDs over `commit_data_request` and processed by
/// `_task`.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}
296
/// Lifecycle of the commit-data handler: not yet spawned, running, or
/// shut down.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
302
/// Commit-graph data being fetched for one (log source, log order) pair,
/// together with the task producing it and any error encountered.
pub struct InitialGitGraphData {
    // Keeps the fetch alive; dropping it cancels the load.
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index into `commit_data` for each commit OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
309
/// Borrowed view of the commit graph returned to callers: the commits loaded
/// so far, whether loading is still in progress, and any load error.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
315
/// Live handle to a single git repository: its latest snapshot plus mutable
/// runtime state such as queued jobs, pending operations, commit-graph data,
/// and askpass delegates. Derefs to `RepositorySnapshot`.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Jobs are executed serially off this channel.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Next job id to hand out.
    job_id: JobId,
    // Askpass delegates for in-flight remote operations, keyed by request id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily-resolved backend state (local backend or remote client).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
335
// Lets callers read snapshot fields (branch, statuses, etc.) directly off a
// `Repository` without naming `.snapshot`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
343
/// Backend state for a repository owned by this machine: the filesystem, the
/// opened git backend, and the shell environment to run git operations with.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    pub environment: Arc<HashMap<String, String>>,
}
350
351impl LocalRepositoryState {
352 async fn new(
353 work_directory_abs_path: Arc<Path>,
354 dot_git_abs_path: Arc<Path>,
355 project_environment: WeakEntity<ProjectEnvironment>,
356 fs: Arc<dyn Fs>,
357 cx: &mut AsyncApp,
358 ) -> anyhow::Result<Self> {
359 let environment = project_environment
360 .update(cx, |project_environment, cx| {
361 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
362 })?
363 .await
364 .unwrap_or_else(|| {
365 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
366 HashMap::default()
367 });
368 let search_paths = environment.get("PATH").map(|val| val.to_owned());
369 let backend = cx
370 .background_spawn({
371 let fs = fs.clone();
372 async move {
373 let system_git_binary_path = search_paths
374 .and_then(|search_paths| {
375 which::which_in("git", Some(search_paths), &work_directory_abs_path)
376 .ok()
377 })
378 .or_else(|| which::which("git").ok());
379 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
380 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
381 }
382 })
383 .await?;
384 Ok(LocalRepositoryState {
385 backend,
386 environment: Arc::new(environment),
387 fs,
388 })
389 }
390}
391
/// Backend state for a repository hosted by an upstream collaborator; git
/// operations are forwarded over `client` for `project_id`.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
397
/// How a repository's git operations are executed: directly against a local
/// backend, or forwarded to a remote host.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
403
/// Progress events for a commit-graph load.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// More commits are available; carries the current count.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
410
/// Events emitted by a `Repository` entity when parts of its state change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    /// Carries the new set of pending operations.
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// Commit-graph progress for a specific (source, order) load.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
419
/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
422
/// Store-level events emitted by `GitStore`.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// A write to the git index failed; carries the underlying error.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
434
// `Repository` emits both repository-state changes and job-progress
// notifications; `GitStore` emits store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
438
/// A unit of git work queued on a repository's serial job channel.
pub struct GitJob {
    // Invoked with the resolved backend state when the job is dequeued.
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // When set, allows deduplicating/coalescing equivalent queued jobs.
    key: Option<GitJobKey>,
}
443
/// Identity of a coalescable git job; jobs with equal keys refer to the same
/// kind of work.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    /// Writing index contents for the given paths.
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
451
452impl GitStore {
453 pub fn local(
454 worktree_store: &Entity<WorktreeStore>,
455 buffer_store: Entity<BufferStore>,
456 environment: Entity<ProjectEnvironment>,
457 fs: Arc<dyn Fs>,
458 cx: &mut Context<Self>,
459 ) -> Self {
460 Self::new(
461 worktree_store.clone(),
462 buffer_store,
463 GitStoreState::Local {
464 next_repository_id: Arc::new(AtomicU64::new(1)),
465 downstream: None,
466 project_environment: environment,
467 fs,
468 },
469 cx,
470 )
471 }
472
473 pub fn remote(
474 worktree_store: &Entity<WorktreeStore>,
475 buffer_store: Entity<BufferStore>,
476 upstream_client: AnyProtoClient,
477 project_id: u64,
478 cx: &mut Context<Self>,
479 ) -> Self {
480 Self::new(
481 worktree_store.clone(),
482 buffer_store,
483 GitStoreState::Remote {
484 upstream_client,
485 upstream_project_id: project_id,
486 downstream: None,
487 },
488 cx,
489 )
490 }
491
492 fn new(
493 worktree_store: Entity<WorktreeStore>,
494 buffer_store: Entity<BufferStore>,
495 state: GitStoreState,
496 cx: &mut Context<Self>,
497 ) -> Self {
498 let _subscriptions = vec![
499 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
500 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
501 ];
502
503 GitStore {
504 state,
505 buffer_store,
506 worktree_store,
507 repositories: HashMap::default(),
508 worktree_ids: HashMap::default(),
509 active_repo_id: None,
510 _subscriptions,
511 loading_diffs: HashMap::default(),
512 shared_diffs: HashMap::default(),
513 diffs: HashMap::default(),
514 }
515 }
516
    /// Registers all git-related RPC handlers on `client`, routing incoming
    /// requests and messages to the corresponding `GitStore` handler methods.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_git_diff_stat);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
    }
563
564 pub fn is_local(&self) -> bool {
565 matches!(self.state, GitStoreState::Local { .. })
566 }
567 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
568 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
569 let id = repo.read(cx).id;
570 if self.active_repo_id != Some(id) {
571 self.active_repo_id = Some(id);
572 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
573 }
574 }
575 }
576
    /// Begins sharing this store's repositories with a downstream collaborator.
    ///
    /// For a remote store, sends each repository's initial state immediately and
    /// records the downstream client. For a local store, spawns a background task
    /// that streams `DownstreamUpdate`s to the client, diffing each new snapshot
    /// against the last one sent; the task clears the downstream state when the
    /// update channel closes.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send a full initial update for every repository, split into
                // protocol-sized chunks.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the channel with the current snapshot of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        // Known repository: send only the delta
                                        // since the last snapshot we sent.
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send its full state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // Channel closed (or send failed): drop the downstream
                        // sharing state.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
657
658 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
659 match &mut self.state {
660 GitStoreState::Local {
661 downstream: downstream_client,
662 ..
663 } => {
664 downstream_client.take();
665 }
666 GitStoreState::Remote {
667 downstream: downstream_client,
668 ..
669 } => {
670 downstream_client.take();
671 }
672 }
673 self.shared_diffs.clear();
674 }
675
    /// Drops the record of diffs shared with `peer_id`.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
679
680 pub fn active_repository(&self) -> Option<Entity<Repository>> {
681 self.active_repo_id
682 .as_ref()
683 .map(|id| self.repositories[id].clone())
684 }
685
    /// Returns a diff of `buffer` against the git index (staged text).
    ///
    /// Reuses an already-open diff when one exists (waiting for any in-progress
    /// recalculation first); otherwise loads the staged text from the buffer's
    /// repository, sharing a single load task between concurrent callers.
    /// Fails if the buffer doesn't belong to a known repository.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // A recalculation may be running; hand back the existing diff
            // only once it settles.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads via a shared task keyed by
        // (buffer, DiffKind::Unstaged).
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
740
    /// Returns a diff of `buffer` against the blob at `oid` in `repo`
    /// (`None` means no base text is loaded).
    ///
    /// Reuses an existing open diff or an in-flight load when possible;
    /// otherwise loads the blob content, builds a new `BufferDiff` with the
    /// buffer's unstaged diff attached as its secondary diff, and caches the
    /// result in the buffer's git state.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a diff against this OID is already open.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load for the same (buffer, oid) may already be in flight.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository, unless no base
                    // commit was given.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Attach the unstaged diff so staged/unstaged hunks can be
                    // distinguished within this diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Record the finished diff (and base text) in the buffer's
                    // git state, and stop tracking the in-flight load.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
841
842 #[ztracing::instrument(skip_all)]
843 pub fn open_uncommitted_diff(
844 &mut self,
845 buffer: Entity<Buffer>,
846 cx: &mut Context<Self>,
847 ) -> Task<Result<Entity<BufferDiff>>> {
848 let buffer_id = buffer.read(cx).remote_id();
849
850 if let Some(diff_state) = self.diffs.get(&buffer_id)
851 && let Some(uncommitted_diff) = diff_state
852 .read(cx)
853 .uncommitted_diff
854 .as_ref()
855 .and_then(|weak| weak.upgrade())
856 {
857 if let Some(task) =
858 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
859 {
860 return cx.background_executor().spawn(async move {
861 task.await;
862 Ok(uncommitted_diff)
863 });
864 }
865 return Task::ready(Ok(uncommitted_diff));
866 }
867
868 let Some((repo, repo_path)) =
869 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
870 else {
871 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
872 };
873
874 let task = self
875 .loading_diffs
876 .entry((buffer_id, DiffKind::Uncommitted))
877 .or_insert_with(|| {
878 let changes = repo.update(cx, |repo, cx| {
879 repo.load_committed_text(buffer_id, repo_path, cx)
880 });
881
882 // todo(lw): hot foreground spawn
883 cx.spawn(async move |this, cx| {
884 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
885 .await
886 .map_err(Arc::new)
887 })
888 .shared()
889 })
890 .clone();
891
892 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
893 }
894
    /// Shared tail of `open_unstaged_diff`/`open_uncommitted_diff`: builds the
    /// `BufferDiff`, registers it in the buffer's git state, applies the loaded
    /// base texts, and waits for the initial recalculation to finish.
    ///
    /// On a load error, removes the in-flight entry and propagates the error.
    /// Not used for `DiffKind::SinceOid` (see `open_diff_since`).
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the loading entry so a later call can retry.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none is open yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Apply the new base texts and wait for the resulting
                // recalculation before resolving.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
969
970 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
971 let diff_state = self.diffs.get(&buffer_id)?;
972 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
973 }
974
975 pub fn get_uncommitted_diff(
976 &self,
977 buffer_id: BufferId,
978 cx: &App,
979 ) -> Option<Entity<BufferDiff>> {
980 let diff_state = self.diffs.get(&buffer_id)?;
981 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
982 }
983
984 pub fn get_diff_since_oid(
985 &self,
986 buffer_id: BufferId,
987 oid: Option<git::Oid>,
988 cx: &App,
989 ) -> Option<Entity<BufferDiff>> {
990 let diff_state = self.diffs.get(&buffer_id)?;
991 diff_state.read(cx).oid_diff(oid)
992 }
993
    /// Returns the conflict set for `buffer`, creating one if needed.
    ///
    /// If one is already open, triggers a reparse of the buffer's conflict
    /// markers and returns it. Otherwise creates a new `ConflictSet` seeded
    /// with whether the buffer's path is unmerged in its repository, wires it
    /// to emit `ConflictsUpdated`, and kicks off an initial marker parse.
    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(git_state) = self.diffs.get(&buffer_id)
            && let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            let conflict_set = conflict_set;
            let buffer_snapshot = buffer.read(cx).text_snapshot();

            // Refresh the markers in case the buffer changed since the set
            // was last parsed.
            git_state.update(cx, |state, cx| {
                let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
            });

            return conflict_set;
        }

        // Seed the set with the repository's unmerged status for this path.
        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        // Forward conflict-set changes as a store-level event.
        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }
1042
1043 pub fn project_path_git_status(
1044 &self,
1045 project_path: &ProjectPath,
1046 cx: &App,
1047 ) -> Option<FileStatus> {
1048 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1049 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1050 }
1051
    /// Captures a checkpoint of every repository in the store, keyed by
    /// working-directory path, so the project's git state can later be
    /// restored or compared. Fails if any repository's checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` resolves to a nested result; the `?` in the
                // map closure flattens it to a single error layer so
                // `try_join_all` below sees one error type.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Paths and checkpoints were pushed in lockstep, so zipping
                // pairs each work dir with its own checkpoint.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1072
    /// Restores each repository to the state captured in `checkpoint`.
    ///
    /// Repositories are matched to checkpoints by working-directory path;
    /// checkpoints whose path no longer corresponds to a tracked repository are
    /// silently skipped. All restores run concurrently and the task fails if
    /// any individual restore fails.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // The `?` appears to flatten the nested Result produced by the
                // restore future.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1098
1099 /// Compares two checkpoints, returning true if they are equal.
1100 pub fn compare_checkpoints(
1101 &self,
1102 left: GitStoreCheckpoint,
1103 mut right: GitStoreCheckpoint,
1104 cx: &mut App,
1105 ) -> Task<Result<bool>> {
1106 let repositories_by_work_dir_abs_path = self
1107 .repositories
1108 .values()
1109 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1110 .collect::<HashMap<_, _>>();
1111
1112 let mut tasks = Vec::new();
1113 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1114 if let Some(right_checkpoint) = right
1115 .checkpoints_by_work_dir_abs_path
1116 .remove(&work_dir_abs_path)
1117 {
1118 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1119 {
1120 let compare = repository.update(cx, |repository, _| {
1121 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1122 });
1123
1124 tasks.push(async move { compare.await? });
1125 }
1126 } else {
1127 return Task::ready(Ok(false));
1128 }
1129 }
1130 cx.background_spawn(async move {
1131 Ok(future::try_join_all(tasks)
1132 .await?
1133 .into_iter()
1134 .all(|result| result))
1135 })
1136 }
1137
    /// Blames a buffer.
    ///
    /// If `version` is given, the buffer's text as of that version is blamed;
    /// otherwise its current contents are used. Local repositories compute the
    /// blame via the git backend; remote projects forward a `BlameBuffer`
    /// request upstream. Fails if the buffer has no associated git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the text (at the requested version, if any) before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold the repository weakly so the blame task doesn't keep it alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1184
1185 pub fn file_history(
1186 &self,
1187 repo: &Entity<Repository>,
1188 path: RepoPath,
1189 cx: &mut App,
1190 ) -> Task<Result<git::repository::FileHistory>> {
1191 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1192
1193 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1194 }
1195
1196 pub fn file_history_paginated(
1197 &self,
1198 repo: &Entity<Repository>,
1199 path: RepoPath,
1200 skip: usize,
1201 limit: Option<usize>,
1202 cx: &mut App,
1203 ) -> Task<Result<git::repository::FileHistory>> {
1204 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1205
1206 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1207 }
1208
    /// Builds a permalink URL to the given line selection in a buffer, using
    /// the repository's remote URL and the registered git hosting providers.
    ///
    /// For Rust sources outside any git repository (e.g. in the Cargo
    /// registry), falls back to building a permalink from crate metadata.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Use the current branch's upstream remote when it has one, otherwise
        // fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        // Pin the permalink to the current HEAD commit.
                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Delegate to the host, which has local repository access.
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1293
1294 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1295 match &self.state {
1296 GitStoreState::Local {
1297 downstream: downstream_client,
1298 ..
1299 } => downstream_client
1300 .as_ref()
1301 .map(|state| (state.client.clone(), state.project_id)),
1302 GitStoreState::Remote {
1303 downstream: downstream_client,
1304 ..
1305 } => downstream_client.clone(),
1306 }
1307 }
1308
1309 fn upstream_client(&self) -> Option<AnyProtoClient> {
1310 match &self.state {
1311 GitStoreState::Local { .. } => None,
1312 GitStoreState::Remote {
1313 upstream_client, ..
1314 } => Some(upstream_client.clone()),
1315 }
1316 }
1317
    /// Reacts to worktree-store events for a *local* project: schedules path
    /// change notifications for repositories, registers/updates repositories
    /// discovered by worktrees, and drops repositories whose last referencing
    /// worktree was removed. No-op for remote stores.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        // Only the local state owns repositories directly; remote stores learn
        // about repositories via proto updates instead.
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the updated entries by the repository containing
                    // them, then notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Repositories are only tracked for visible worktrees.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Detach the removed worktree from every repository and collect
                // the repositories left with no referencing worktree.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository went away, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to a repository's state changing: refreshes conflict tracking
    /// for buffers belonging to that repository, then re-emits the event at
    /// the store level (flagging whether the active repository changed).
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only touch buffers that live in the repository that changed.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Sync the conflict flag with the repository's unmerged
                        // state; the `?` bails out if the weakly-held conflict
                        // set has already been dropped.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Re-parse markers only when the flag actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1463
1464 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1465 cx.emit(GitStoreEvent::JobsUpdated)
1466 }
1467
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
    ///
    /// Each update either refreshes an existing repository (matched by old or
    /// new working-directory path), registers a newly-discovered local
    /// repository, or detaches this worktree from a repository — removing the
    /// repository entirely once no worktree references it anymore.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match against either the old or the new working directory so a
            // rename updates the existing entry instead of creating a new one.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repository still exists (possibly at a new path):
                    // re-associate it with this worktree and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // The repository disappeared from this worktree; remove it
                    // entirely once no other worktree still references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(_repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A repository we haven't seen before: create a local entity
                // for it and kick off an initial scan.
                let original_repo_abs_path: Arc<Path> =
                    git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository ever added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            // NOTE(review): the now-empty `self.worktree_ids` entry for `id` is
            // left in place here, unlike the removal path in
            // `on_worktree_store_event` which also removes it — confirm
            // whether that's intentional.
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1568
    /// Keeps per-buffer git state in sync with buffer lifecycle events from
    /// the buffer store.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Notify the buffer's git state whenever its language changes.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // A peer stopped viewing this buffer; drop its shared diff.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // The buffer is gone entirely; release all associated git state.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Best-effort: failures are logged and swallowed.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1641
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all of the
    /// scheduled work has completed. Buffers without git state are skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` yields a completion future only
                    // when a recalculation is actually pending.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1667
    /// Reacts to hunks being staged or unstaged in a buffer diff by writing
    /// the new index text back to the repository. On write failure, pending
    /// hunk state is cleared and an `IndexWriteError` event is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump and capture the per-buffer staging-operation counter;
                // presumably used downstream to detect stale refreshes — see
                // `spawn_set_index_text_job`.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Outer Ok = the job ran; inner Err = the index write failed.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1707
1708 fn local_worktree_git_repos_changed(
1709 &mut self,
1710 worktree: Entity<Worktree>,
1711 changed_repos: &UpdatedGitRepositoriesSet,
1712 cx: &mut Context<Self>,
1713 ) {
1714 log::debug!("local worktree repos changed");
1715 debug_assert!(worktree.read(cx).is_local());
1716
1717 for repository in self.repositories.values() {
1718 repository.update(cx, |repository, cx| {
1719 let repo_abs_path = &repository.work_directory_abs_path;
1720 if changed_repos.iter().any(|update| {
1721 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1722 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1723 }) {
1724 repository.reload_buffer_diff_bases(cx);
1725 }
1726 });
1727 }
1728 }
1729
    /// All repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1733
1734 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1735 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1736 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1737 Some(status.status)
1738 }
1739
1740 pub fn repository_and_path_for_buffer_id(
1741 &self,
1742 buffer_id: BufferId,
1743 cx: &App,
1744 ) -> Option<(Entity<Repository>, RepoPath)> {
1745 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1746 let project_path = buffer.read(cx).project_path(cx)?;
1747 self.repository_and_path_for_project_path(&project_path, cx)
1748 }
1749
1750 pub fn repository_and_path_for_project_path(
1751 &self,
1752 path: &ProjectPath,
1753 cx: &App,
1754 ) -> Option<(Entity<Repository>, RepoPath)> {
1755 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1756 self.repositories
1757 .values()
1758 .filter_map(|repo| {
1759 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1760 Some((repo.clone(), repo_path))
1761 })
1762 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1763 }
1764
1765 pub fn git_init(
1766 &self,
1767 path: Arc<Path>,
1768 fallback_branch_name: String,
1769 cx: &App,
1770 ) -> Task<Result<()>> {
1771 match &self.state {
1772 GitStoreState::Local { fs, .. } => {
1773 let fs = fs.clone();
1774 cx.background_executor()
1775 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1776 }
1777 GitStoreState::Remote {
1778 upstream_client,
1779 upstream_project_id: project_id,
1780 ..
1781 } => {
1782 let client = upstream_client.clone();
1783 let project_id = *project_id;
1784 cx.background_executor().spawn(async move {
1785 client
1786 .request(proto::GitInit {
1787 project_id: project_id,
1788 abs_path: path.to_string_lossy().into_owned(),
1789 fallback_branch_name,
1790 })
1791 .await?;
1792 Ok(())
1793 })
1794 }
1795 }
1796 }
1797
1798 pub fn git_clone(
1799 &self,
1800 repo: String,
1801 path: impl Into<Arc<std::path::Path>>,
1802 cx: &App,
1803 ) -> Task<Result<()>> {
1804 let path = path.into();
1805 match &self.state {
1806 GitStoreState::Local { fs, .. } => {
1807 let fs = fs.clone();
1808 cx.background_executor()
1809 .spawn(async move { fs.git_clone(&repo, &path).await })
1810 }
1811 GitStoreState::Remote {
1812 upstream_client,
1813 upstream_project_id,
1814 ..
1815 } => {
1816 if upstream_client.is_via_collab() {
1817 return Task::ready(Err(anyhow!(
1818 "Git Clone isn't supported for project guests"
1819 )));
1820 }
1821 let request = upstream_client.request(proto::GitClone {
1822 project_id: *upstream_project_id,
1823 abs_path: path.to_string_lossy().into_owned(),
1824 remote_repo: repo,
1825 });
1826
1827 cx.background_spawn(async move {
1828 let result = request.await?;
1829
1830 match result.success {
1831 true => Ok(()),
1832 false => Err(anyhow!("Git Clone failed")),
1833 }
1834 })
1835 }
1836 }
1837 }
1838
    /// RPC handler for `proto::UpdateRepository`: creates or updates the
    /// corresponding remote repository entity, adopts it as the active
    /// repository if none is set, and re-forwards the update to any
    /// downstream clients.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Capture the subscription out of the `or_insert_with` closure and
            // register it afterwards.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository reported becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-share the update with our own downstream peers, rewriting the
            // project id to ours.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1894
1895 async fn handle_remove_repository(
1896 this: Entity<Self>,
1897 envelope: TypedEnvelope<proto::RemoveRepository>,
1898 mut cx: AsyncApp,
1899 ) -> Result<()> {
1900 this.update(&mut cx, |this, cx| {
1901 let mut update = envelope.payload;
1902 let id = RepositoryId::from_proto(update.id);
1903 this.repositories.remove(&id);
1904 if let Some((client, project_id)) = this.downstream_client() {
1905 update.project_id = project_id.to_proto();
1906 client.send(update).log_err();
1907 }
1908 if this.active_repo_id == Some(id) {
1909 this.active_repo_id = None;
1910 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1911 }
1912 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1913 });
1914 Ok(())
1915 }
1916
    /// RPC handler for `proto::GitInit`: initializes a repository on behalf of
    /// a remote peer.
    async fn handle_git_init(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitInit>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let name = envelope.payload.fallback_branch_name;
        cx.update(|cx| this.read(cx).git_init(path, name, cx))
            .await?;

        Ok(proto::Ack {})
    }
1929
    /// RPC handler for `proto::GitClone`: clones a remote repository on behalf
    /// of a peer. Any failure is reported as `success: false` in the response
    /// rather than as an RPC error.
    async fn handle_git_clone(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitClone>,
        cx: AsyncApp,
    ) -> Result<proto::GitCloneResponse> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let repo_name = envelope.payload.remote_repo;
        let result = cx
            .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
            .await;

        Ok(proto::GitCloneResponse {
            success: result.is_ok(),
        })
    }
1945
    /// RPC handler for `proto::Fetch`: runs a fetch on the requested
    /// repository and returns the remote's stdout/stderr.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Credential prompts during the fetch are routed back to the
        // requesting peer via this delegate.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
1975
    /// RPC handler for `proto::Push`: pushes a branch on the requested
    /// repository, proxying askpass (credential) prompts back to the peer,
    /// and returns the remote's stdout/stderr.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // Decode the push options only when the field was present on the wire;
        // `map` preserves `None` otherwise.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2023
    /// RPC handler for `proto::Pull`: pulls (optionally rebasing) on the
    /// requested repository, proxying askpass (credential) prompts back to
    /// the peer, and returns the remote's stdout/stderr.
    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `branch_name` is optional on the wire; `None` is passed through.
        let branch_name = envelope.payload.branch_name.map(|name| name.into());
        let remote_name = envelope.payload.remote_name.into();
        let rebase = envelope.payload.rebase;

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }
2055
    /// RPC handler for `proto::Stage`: stages the given paths in the requested
    /// repository.
    async fn handle_stage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Parse every wire path into a `RepoPath`; the whole request fails on
        // the first invalid path.
        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(|path| RepoPath::new(&path))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stage_entries(entries, cx)
            })
            .await?;
        Ok(proto::Ack {})
    }
2078
    /// RPC handler for `proto::Unstage`: unstages the given paths in the
    /// requested repository.
    async fn handle_unstage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Unstage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Parse every wire path into a `RepoPath`; the whole request fails on
        // the first invalid path.
        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(|path| RepoPath::new(&path))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.unstage_entries(entries, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2102
2103 async fn handle_stash(
2104 this: Entity<Self>,
2105 envelope: TypedEnvelope<proto::Stash>,
2106 mut cx: AsyncApp,
2107 ) -> Result<proto::Ack> {
2108 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2109 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2110
2111 let entries = envelope
2112 .payload
2113 .paths
2114 .into_iter()
2115 .map(|path| RepoPath::new(&path))
2116 .collect::<Result<Vec<_>>>()?;
2117
2118 repository_handle
2119 .update(&mut cx, |repository_handle, cx| {
2120 repository_handle.stash_entries(entries, cx)
2121 })
2122 .await?;
2123
2124 Ok(proto::Ack {})
2125 }
2126
2127 async fn handle_stash_pop(
2128 this: Entity<Self>,
2129 envelope: TypedEnvelope<proto::StashPop>,
2130 mut cx: AsyncApp,
2131 ) -> Result<proto::Ack> {
2132 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2133 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2134 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2135
2136 repository_handle
2137 .update(&mut cx, |repository_handle, cx| {
2138 repository_handle.stash_pop(stash_index, cx)
2139 })
2140 .await?;
2141
2142 Ok(proto::Ack {})
2143 }
2144
2145 async fn handle_stash_apply(
2146 this: Entity<Self>,
2147 envelope: TypedEnvelope<proto::StashApply>,
2148 mut cx: AsyncApp,
2149 ) -> Result<proto::Ack> {
2150 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2151 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2152 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2153
2154 repository_handle
2155 .update(&mut cx, |repository_handle, cx| {
2156 repository_handle.stash_apply(stash_index, cx)
2157 })
2158 .await?;
2159
2160 Ok(proto::Ack {})
2161 }
2162
2163 async fn handle_stash_drop(
2164 this: Entity<Self>,
2165 envelope: TypedEnvelope<proto::StashDrop>,
2166 mut cx: AsyncApp,
2167 ) -> Result<proto::Ack> {
2168 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2169 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2170 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2171
2172 repository_handle
2173 .update(&mut cx, |repository_handle, cx| {
2174 repository_handle.stash_drop(stash_index, cx)
2175 })
2176 .await??;
2177
2178 Ok(proto::Ack {})
2179 }
2180
2181 async fn handle_set_index_text(
2182 this: Entity<Self>,
2183 envelope: TypedEnvelope<proto::SetIndexText>,
2184 mut cx: AsyncApp,
2185 ) -> Result<proto::Ack> {
2186 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2187 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2188 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2189
2190 repository_handle
2191 .update(&mut cx, |repository_handle, cx| {
2192 repository_handle.spawn_set_index_text_job(
2193 repo_path,
2194 envelope.payload.text,
2195 None,
2196 cx,
2197 )
2198 })
2199 .await??;
2200 Ok(proto::Ack {})
2201 }
2202
2203 async fn handle_run_hook(
2204 this: Entity<Self>,
2205 envelope: TypedEnvelope<proto::RunGitHook>,
2206 mut cx: AsyncApp,
2207 ) -> Result<proto::Ack> {
2208 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2209 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2210 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2211 repository_handle
2212 .update(&mut cx, |repository_handle, cx| {
2213 repository_handle.run_hook(hook, cx)
2214 })
2215 .await??;
2216 Ok(proto::Ack {})
2217 }
2218
    /// RPC handler: create a commit in the requested repository.
    ///
    /// Credential prompts raised while committing are routed back to the
    /// requesting peer through a remote askpass delegate (identified by
    /// `askpass_id`) instead of being answered on this machine.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        // Missing options on the wire fall back to the proto default
        // (no amend, no signoff).
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    // The author identity override is only applied when both
                    // name and email were supplied; `zip` yields None otherwise.
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2257
2258 async fn handle_get_remotes(
2259 this: Entity<Self>,
2260 envelope: TypedEnvelope<proto::GetRemotes>,
2261 mut cx: AsyncApp,
2262 ) -> Result<proto::GetRemotesResponse> {
2263 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2264 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2265
2266 let branch_name = envelope.payload.branch_name;
2267 let is_push = envelope.payload.is_push;
2268
2269 let remotes = repository_handle
2270 .update(&mut cx, |repository_handle, _| {
2271 repository_handle.get_remotes(branch_name, is_push)
2272 })
2273 .await??;
2274
2275 Ok(proto::GetRemotesResponse {
2276 remotes: remotes
2277 .into_iter()
2278 .map(|remotes| proto::get_remotes_response::Remote {
2279 name: remotes.name.to_string(),
2280 })
2281 .collect::<Vec<_>>(),
2282 })
2283 }
2284
2285 async fn handle_get_worktrees(
2286 this: Entity<Self>,
2287 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2288 mut cx: AsyncApp,
2289 ) -> Result<proto::GitWorktreesResponse> {
2290 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2291 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2292
2293 let worktrees = repository_handle
2294 .update(&mut cx, |repository_handle, _| {
2295 repository_handle.worktrees()
2296 })
2297 .await??;
2298
2299 Ok(proto::GitWorktreesResponse {
2300 worktrees: worktrees
2301 .into_iter()
2302 .map(|worktree| worktree_to_proto(&worktree))
2303 .collect::<Vec<_>>(),
2304 })
2305 }
2306
2307 async fn handle_create_worktree(
2308 this: Entity<Self>,
2309 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2310 mut cx: AsyncApp,
2311 ) -> Result<proto::Ack> {
2312 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2313 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2314 let directory = PathBuf::from(envelope.payload.directory);
2315 let name = envelope.payload.name;
2316 let commit = envelope.payload.commit;
2317
2318 repository_handle
2319 .update(&mut cx, |repository_handle, _| {
2320 repository_handle.create_worktree(name, directory, commit)
2321 })
2322 .await??;
2323
2324 Ok(proto::Ack {})
2325 }
2326
2327 async fn handle_get_branches(
2328 this: Entity<Self>,
2329 envelope: TypedEnvelope<proto::GitGetBranches>,
2330 mut cx: AsyncApp,
2331 ) -> Result<proto::GitBranchesResponse> {
2332 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2333 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2334
2335 let branches = repository_handle
2336 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2337 .await??;
2338
2339 Ok(proto::GitBranchesResponse {
2340 branches: branches
2341 .into_iter()
2342 .map(|branch| branch_to_proto(&branch))
2343 .collect::<Vec<_>>(),
2344 })
2345 }
    /// RPC handler: report the repository's default branch, if one can be
    /// determined.
    async fn handle_get_default_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDefaultBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetDefaultBranchResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch = repository_handle
            .update(&mut cx, |repository_handle, _| {
                // NOTE(review): the meaning of the `false` flag isn't visible
                // here — check `Repository::default_branch`'s signature.
                repository_handle.default_branch(false)
            })
            .await??
            .map(Into::into);

        Ok(proto::GetDefaultBranchResponse { branch })
    }
2363 async fn handle_create_branch(
2364 this: Entity<Self>,
2365 envelope: TypedEnvelope<proto::GitCreateBranch>,
2366 mut cx: AsyncApp,
2367 ) -> Result<proto::Ack> {
2368 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2369 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2370 let branch_name = envelope.payload.branch_name;
2371
2372 repository_handle
2373 .update(&mut cx, |repository_handle, _| {
2374 repository_handle.create_branch(branch_name, None)
2375 })
2376 .await??;
2377
2378 Ok(proto::Ack {})
2379 }
2380
2381 async fn handle_change_branch(
2382 this: Entity<Self>,
2383 envelope: TypedEnvelope<proto::GitChangeBranch>,
2384 mut cx: AsyncApp,
2385 ) -> Result<proto::Ack> {
2386 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2387 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2388 let branch_name = envelope.payload.branch_name;
2389
2390 repository_handle
2391 .update(&mut cx, |repository_handle, _| {
2392 repository_handle.change_branch(branch_name)
2393 })
2394 .await??;
2395
2396 Ok(proto::Ack {})
2397 }
2398
2399 async fn handle_rename_branch(
2400 this: Entity<Self>,
2401 envelope: TypedEnvelope<proto::GitRenameBranch>,
2402 mut cx: AsyncApp,
2403 ) -> Result<proto::Ack> {
2404 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2405 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2406 let branch = envelope.payload.branch;
2407 let new_name = envelope.payload.new_name;
2408
2409 repository_handle
2410 .update(&mut cx, |repository_handle, _| {
2411 repository_handle.rename_branch(branch, new_name)
2412 })
2413 .await??;
2414
2415 Ok(proto::Ack {})
2416 }
2417
2418 async fn handle_create_remote(
2419 this: Entity<Self>,
2420 envelope: TypedEnvelope<proto::GitCreateRemote>,
2421 mut cx: AsyncApp,
2422 ) -> Result<proto::Ack> {
2423 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2424 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2425 let remote_name = envelope.payload.remote_name;
2426 let remote_url = envelope.payload.remote_url;
2427
2428 repository_handle
2429 .update(&mut cx, |repository_handle, _| {
2430 repository_handle.create_remote(remote_name, remote_url)
2431 })
2432 .await??;
2433
2434 Ok(proto::Ack {})
2435 }
2436
2437 async fn handle_delete_branch(
2438 this: Entity<Self>,
2439 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2440 mut cx: AsyncApp,
2441 ) -> Result<proto::Ack> {
2442 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2443 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2444 let branch_name = envelope.payload.branch_name;
2445
2446 repository_handle
2447 .update(&mut cx, |repository_handle, _| {
2448 repository_handle.delete_branch(branch_name)
2449 })
2450 .await??;
2451
2452 Ok(proto::Ack {})
2453 }
2454
2455 async fn handle_remove_remote(
2456 this: Entity<Self>,
2457 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2458 mut cx: AsyncApp,
2459 ) -> Result<proto::Ack> {
2460 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2461 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2462 let remote_name = envelope.payload.remote_name;
2463
2464 repository_handle
2465 .update(&mut cx, |repository_handle, _| {
2466 repository_handle.remove_remote(remote_name)
2467 })
2468 .await??;
2469
2470 Ok(proto::Ack {})
2471 }
2472
2473 async fn handle_show(
2474 this: Entity<Self>,
2475 envelope: TypedEnvelope<proto::GitShow>,
2476 mut cx: AsyncApp,
2477 ) -> Result<proto::GitCommitDetails> {
2478 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2479 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2480
2481 let commit = repository_handle
2482 .update(&mut cx, |repository_handle, _| {
2483 repository_handle.show(envelope.payload.commit)
2484 })
2485 .await??;
2486 Ok(proto::GitCommitDetails {
2487 sha: commit.sha.into(),
2488 message: commit.message.into(),
2489 commit_timestamp: commit.commit_timestamp,
2490 author_email: commit.author_email.into(),
2491 author_name: commit.author_name.into(),
2492 })
2493 }
2494
2495 async fn handle_load_commit_diff(
2496 this: Entity<Self>,
2497 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2498 mut cx: AsyncApp,
2499 ) -> Result<proto::LoadCommitDiffResponse> {
2500 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2501 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2502
2503 let commit_diff = repository_handle
2504 .update(&mut cx, |repository_handle, _| {
2505 repository_handle.load_commit_diff(envelope.payload.commit)
2506 })
2507 .await??;
2508 Ok(proto::LoadCommitDiffResponse {
2509 files: commit_diff
2510 .files
2511 .into_iter()
2512 .map(|file| proto::CommitFile {
2513 path: file.path.to_proto(),
2514 old_text: file.old_text,
2515 new_text: file.new_text,
2516 is_binary: file.is_binary,
2517 })
2518 .collect(),
2519 })
2520 }
2521
    /// RPC handler: return a page of the commit history for one file.
    ///
    /// `skip`/`limit` implement pagination: `skip` commits are skipped from
    /// the start of the history, and at most `limit` entries are returned
    /// (unbounded when `limit` is `None`).
    async fn handle_file_history(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitFileHistory>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitFileHistoryResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let path = RepoPath::from_proto(&envelope.payload.path)?;
        let skip = envelope.payload.skip as usize;
        let limit = envelope.payload.limit.map(|l| l as usize);

        let file_history = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.file_history_paginated(path, skip, limit)
            })
            .await??;

        Ok(proto::GitFileHistoryResponse {
            entries: file_history
                .entries
                .into_iter()
                .map(|entry| proto::FileHistoryEntry {
                    sha: entry.sha.to_string(),
                    subject: entry.subject.to_string(),
                    message: entry.message.to_string(),
                    commit_timestamp: entry.commit_timestamp,
                    author_name: entry.author_name.to_string(),
                    author_email: entry.author_email.to_string(),
                })
                .collect(),
            // The history may report a different path than was requested
            // (e.g. after renames), so echo back the path it resolved to.
            path: file_history.path.to_proto(),
        })
    }
2555
    /// RPC handler: reset the repository to a given commit.
    ///
    /// Note: the wire enum only offers `Soft` and `Mixed`; a hard reset is
    /// not expressible through this request.
    async fn handle_reset(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitReset>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Map the protobuf enum onto the internal reset mode.
        let mode = match envelope.payload.mode() {
            git_reset::ResetMode::Soft => ResetMode::Soft,
            git_reset::ResetMode::Mixed => ResetMode::Mixed,
        };

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.reset(envelope.payload.commit, mode, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2576
2577 async fn handle_checkout_files(
2578 this: Entity<Self>,
2579 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2580 mut cx: AsyncApp,
2581 ) -> Result<proto::Ack> {
2582 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2583 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2584 let paths = envelope
2585 .payload
2586 .paths
2587 .iter()
2588 .map(|s| RepoPath::from_proto(s))
2589 .collect::<Result<Vec<_>>>()?;
2590
2591 repository_handle
2592 .update(&mut cx, |repository_handle, cx| {
2593 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2594 })
2595 .await?;
2596 Ok(proto::Ack {})
2597 }
2598
    /// RPC handler: open (or create) the commit-message buffer for a
    /// repository and share it with the requesting peer.
    ///
    /// The buffer is first opened locally, then replicated to the peer via
    /// the buffer store; the response carries only the buffer's remote id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        // Route the replica to the original requester when the
                        // request was forwarded through another peer.
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    // Replication happens in the background; failures are
                    // logged rather than failing this response.
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2629
    /// RPC handler: answer a credential prompt on behalf of a remote git
    /// operation.
    ///
    /// The delegate registered under `askpass_id` is *removed* from the map
    /// while the user is being prompted (so a concurrent request for the same
    /// id fails fast rather than prompting twice) and reinserted afterwards
    /// for any follow-up prompts.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            // A missing delegate indicates a protocol bug: panic in debug
            // builds, degrade to an error in release builds.
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        // `None` means the user dismissed the prompt.
        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2658
2659 async fn handle_check_for_pushed_commits(
2660 this: Entity<Self>,
2661 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2662 mut cx: AsyncApp,
2663 ) -> Result<proto::CheckForPushedCommitsResponse> {
2664 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2665 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2666
2667 let branches = repository_handle
2668 .update(&mut cx, |repository_handle, _| {
2669 repository_handle.check_for_pushed_commits()
2670 })
2671 .await??;
2672 Ok(proto::CheckForPushedCommitsResponse {
2673 pushed_to: branches
2674 .into_iter()
2675 .map(|commit| commit.to_string())
2676 .collect(),
2677 })
2678 }
2679
    /// RPC handler: produce a textual diff for the repository.
    ///
    /// Supports head-vs-index, head-vs-worktree, and merge-base diffs; the
    /// output is truncated to keep the response bounded.
    async fn handle_git_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let diff_type = match envelope.payload.diff_type() {
            proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
            proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
            proto::git_diff::DiffType::MergeBase => {
                // The merge-base variant is the only one that needs an
                // explicit base ref.
                let base_ref = envelope
                    .payload
                    .merge_base_ref
                    .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
                DiffType::MergeBase {
                    base_ref: base_ref.into(),
                }
            }
        };

        let mut diff = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.diff(diff_type, cx)
            })
            .await??;
        // Truncate on a char boundary to stay valid UTF-8. NOTE(review): the
        // cap counts chars, so multi-byte text can still exceed ONE_MB bytes.
        const ONE_MB: usize = 1_000_000;
        if diff.len() > ONE_MB {
            diff = diff.chars().take(ONE_MB).collect()
        }

        Ok(proto::GitDiffResponse { diff })
    }
2713
    /// RPC handler: produce per-file added/deleted line counts for a diff,
    /// mirroring the diff-type selection of `handle_git_diff`.
    async fn handle_git_diff_stat(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitDiffStat>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitDiffStatResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let diff_type = match envelope.payload.diff_type() {
            proto::git_diff_stat::DiffType::HeadToIndex => DiffType::HeadToIndex,
            proto::git_diff_stat::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
            proto::git_diff_stat::DiffType::MergeBase => {
                // Only the merge-base variant requires an explicit base ref.
                let base_ref = envelope
                    .payload
                    .merge_base_ref
                    .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
                DiffType::MergeBase {
                    base_ref: base_ref.into(),
                }
            }
        };

        let stats = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.diff_stat(diff_type, cx)
            })
            .await??;

        // One wire entry per changed path.
        let entries = stats
            .into_iter()
            .map(|(path, stat)| proto::GitDiffStatEntry {
                path: path.to_proto(),
                added: stat.added,
                deleted: stat.deleted,
            })
            .collect();

        Ok(proto::GitDiffStatResponse { entries })
    }
2752
2753 async fn handle_tree_diff(
2754 this: Entity<Self>,
2755 request: TypedEnvelope<proto::GetTreeDiff>,
2756 mut cx: AsyncApp,
2757 ) -> Result<proto::GetTreeDiffResponse> {
2758 let repository_id = RepositoryId(request.payload.repository_id);
2759 let diff_type = if request.payload.is_merge {
2760 DiffTreeType::MergeBase {
2761 base: request.payload.base.into(),
2762 head: request.payload.head.into(),
2763 }
2764 } else {
2765 DiffTreeType::Since {
2766 base: request.payload.base.into(),
2767 head: request.payload.head.into(),
2768 }
2769 };
2770
2771 let diff = this
2772 .update(&mut cx, |this, cx| {
2773 let repository = this.repositories().get(&repository_id)?;
2774 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2775 })
2776 .context("missing repository")?
2777 .await??;
2778
2779 Ok(proto::GetTreeDiffResponse {
2780 entries: diff
2781 .entries
2782 .into_iter()
2783 .map(|(path, status)| proto::TreeDiffStatus {
2784 path: path.as_ref().to_proto(),
2785 status: match status {
2786 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2787 TreeDiffStatus::Modified { .. } => {
2788 proto::tree_diff_status::Status::Modified.into()
2789 }
2790 TreeDiffStatus::Deleted { .. } => {
2791 proto::tree_diff_status::Status::Deleted.into()
2792 }
2793 },
2794 oid: match status {
2795 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2796 Some(old.to_string())
2797 }
2798 TreeDiffStatus::Added => None,
2799 },
2800 })
2801 .collect(),
2802 })
2803 }
2804
2805 async fn handle_get_blob_content(
2806 this: Entity<Self>,
2807 request: TypedEnvelope<proto::GetBlobContent>,
2808 mut cx: AsyncApp,
2809 ) -> Result<proto::GetBlobContentResponse> {
2810 let oid = git::Oid::from_str(&request.payload.oid)?;
2811 let repository_id = RepositoryId(request.payload.repository_id);
2812 let content = this
2813 .update(&mut cx, |this, cx| {
2814 let repository = this.repositories().get(&repository_id)?;
2815 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2816 })
2817 .context("missing repository")?
2818 .await?;
2819 Ok(proto::GetBlobContentResponse { content })
2820 }
2821
    /// RPC handler: open the unstaged diff (worktree vs index) for a buffer
    /// and remember it as shared with the requesting peer.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff per peer so later index updates can be pushed to it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        // `None` when no index base text exists for this buffer.
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2845
    /// RPC handler: open the uncommitted diff (worktree vs HEAD, with the
    /// index as a secondary diff) for a buffer and share it with the peer.
    ///
    /// The response's `mode` tells the client whether the index text can be
    /// elided (`IndexMatchesHead`) or must be sent separately (`IndexAndHead`).
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff per peer so later base-text updates can be pushed.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff (vs the index) supplies the staged snapshot,
            // when an index base text exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Index and HEAD share the same base-text buffer, so
                        // the staged text need not be sent at all.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // HEAD exists but there is no index base text.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base text (e.g. file not in HEAD); send
                // whatever the index has.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2906
    /// RPC handler: apply new diff base texts (index/HEAD) pushed by the host
    /// for one buffer.
    ///
    /// Silently ignores buffers with no diff state or that are no longer
    /// open — the update is best-effort.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
2925
    /// RPC handler: compute git blame for a buffer.
    ///
    /// Waits until the local replica has caught up to the requested buffer
    /// version before blaming, so line numbers match the requester's view.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
2948
2949 async fn handle_get_permalink_to_line(
2950 this: Entity<Self>,
2951 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2952 mut cx: AsyncApp,
2953 ) -> Result<proto::GetPermalinkToLineResponse> {
2954 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2955 // let version = deserialize_version(&envelope.payload.version);
2956 let selection = {
2957 let proto_selection = envelope
2958 .payload
2959 .selection
2960 .context("no selection to get permalink for defined")?;
2961 proto_selection.start as u32..proto_selection.end as u32
2962 };
2963 let buffer = this.read_with(&cx, |this, cx| {
2964 this.buffer_store.read(cx).get_existing(buffer_id)
2965 })?;
2966 let permalink = this
2967 .update(&mut cx, |this, cx| {
2968 this.get_permalink_to_line(&buffer, selection, cx)
2969 })
2970 .await?;
2971 Ok(proto::GetPermalinkToLineResponse {
2972 permalink: permalink.to_string(),
2973 })
2974 }
2975
    /// Resolves the repository entity targeted by an incoming request,
    /// returning an error (not a panic) when no repository with that id is
    /// currently registered.
    fn repository_for_request(
        this: &Entity<Self>,
        id: RepositoryId,
        cx: &mut AsyncApp,
    ) -> Result<Entity<Repository>> {
        this.read_with(cx, |this, _| {
            this.repositories
                .get(&id)
                .context("missing repository handle")
                .cloned()
        })
    }
2988
2989 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2990 self.repositories
2991 .iter()
2992 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2993 .collect()
2994 }
2995
    /// Groups a batch of updated worktree entries by the repository that owns
    /// each path.
    ///
    /// When repositories are nested, a path is attributed to its *innermost*
    /// repository only. The grouping runs on the background executor, with one
    /// subtask per repository; the returned map contains only repositories
    /// that matched at least one path.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // (work-directory abs path, repository) pairs for every known repo.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorting lets each repo's paths be located by binary search below.
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize once on the foreground thread; the background tasks
        // share this immutable slice.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sort repos by work directory so that, iterated in reverse, inner
            // (longer) repo paths are visited before their enclosing repos.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the entry index so duplicates can be
                        // filtered across repos below.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Tasks were spawned in reverse repo order, so more-specific repos
            // come first here. We always want to assign a path to its
            // innermost repository; the first repo to claim an entry wins.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3075}
3076
3077impl BufferGitState {
    /// Creates an empty per-buffer git state: no diffs, no cached base texts,
    /// no pending recalculation, and no conflict set.
    ///
    /// The `_git_store` handle is currently unused.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Starts "not recalculating"; flipped while a diff task runs.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }
3100
3101 #[ztracing::instrument(skip_all)]
3102 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3103 self.language = buffer.read(cx).language().cloned();
3104 self.language_changed = true;
3105 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3106 }
3107
3108 fn reparse_conflict_markers(
3109 &mut self,
3110 buffer: text::BufferSnapshot,
3111 cx: &mut Context<Self>,
3112 ) -> oneshot::Receiver<()> {
3113 let (tx, rx) = oneshot::channel();
3114
3115 let Some(conflict_set) = self
3116 .conflict_set
3117 .as_ref()
3118 .and_then(|conflict_set| conflict_set.upgrade())
3119 else {
3120 return rx;
3121 };
3122
3123 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
3124 if conflict_set.has_conflict {
3125 Some(conflict_set.snapshot())
3126 } else {
3127 None
3128 }
3129 });
3130
3131 if let Some(old_snapshot) = old_snapshot {
3132 self.conflict_updated_futures.push(tx);
3133 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
3134 let (snapshot, changed_range) = cx
3135 .background_spawn(async move {
3136 let new_snapshot = ConflictSet::parse(&buffer);
3137 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
3138 (new_snapshot, changed_range)
3139 })
3140 .await;
3141 this.update(cx, |this, cx| {
3142 if let Some(conflict_set) = &this.conflict_set {
3143 conflict_set
3144 .update(cx, |conflict_set, cx| {
3145 conflict_set.set_snapshot(snapshot, changed_range, cx);
3146 })
3147 .ok();
3148 }
3149 let futures = std::mem::take(&mut this.conflict_updated_futures);
3150 for tx in futures {
3151 tx.send(()).ok();
3152 }
3153 })
3154 }))
3155 }
3156
3157 rx
3158 }
3159
3160 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3161 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3162 }
3163
3164 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3165 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3166 }
3167
3168 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3169 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3170 }
3171
3172 fn handle_base_texts_updated(
3173 &mut self,
3174 buffer: text::BufferSnapshot,
3175 message: proto::UpdateDiffBases,
3176 cx: &mut Context<Self>,
3177 ) {
3178 use proto::update_diff_bases::Mode;
3179
3180 let Some(mode) = Mode::from_i32(message.mode) else {
3181 return;
3182 };
3183
3184 let diff_bases_change = match mode {
3185 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3186 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3187 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3188 Mode::IndexAndHead => DiffBasesChange::SetEach {
3189 index: message.staged_text,
3190 head: message.committed_text,
3191 },
3192 };
3193
3194 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3195 }
3196
3197 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3198 if *self.recalculating_tx.borrow() {
3199 let mut rx = self.recalculating_tx.subscribe();
3200 Some(async move {
3201 loop {
3202 let is_recalculating = rx.recv().await;
3203 if is_recalculating != Some(true) {
3204 break;
3205 }
3206 }
3207 })
3208 } else {
3209 None
3210 }
3211 }
3212
3213 fn diff_bases_changed(
3214 &mut self,
3215 buffer: text::BufferSnapshot,
3216 diff_bases_change: Option<DiffBasesChange>,
3217 cx: &mut Context<Self>,
3218 ) {
3219 match diff_bases_change {
3220 Some(DiffBasesChange::SetIndex(index)) => {
3221 self.index_text = index.map(|mut index| {
3222 text::LineEnding::normalize(&mut index);
3223 Arc::from(index.as_str())
3224 });
3225 self.index_changed = true;
3226 }
3227 Some(DiffBasesChange::SetHead(head)) => {
3228 self.head_text = head.map(|mut head| {
3229 text::LineEnding::normalize(&mut head);
3230 Arc::from(head.as_str())
3231 });
3232 self.head_changed = true;
3233 }
3234 Some(DiffBasesChange::SetBoth(text)) => {
3235 let text = text.map(|mut text| {
3236 text::LineEnding::normalize(&mut text);
3237 Arc::from(text.as_str())
3238 });
3239 self.head_text = text.clone();
3240 self.index_text = text;
3241 self.head_changed = true;
3242 self.index_changed = true;
3243 }
3244 Some(DiffBasesChange::SetEach { index, head }) => {
3245 self.index_text = index.map(|mut index| {
3246 text::LineEnding::normalize(&mut index);
3247 Arc::from(index.as_str())
3248 });
3249 self.index_changed = true;
3250 self.head_text = head.map(|mut head| {
3251 text::LineEnding::normalize(&mut head);
3252 Arc::from(head.as_str())
3253 });
3254 self.head_changed = true;
3255 }
3256 None => {}
3257 }
3258
3259 self.recalculate_diffs(buffer, cx)
3260 }
3261
3262 #[ztracing::instrument(skip_all)]
3263 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3264 *self.recalculating_tx.borrow_mut() = true;
3265
3266 let language = self.language.clone();
3267 let language_registry = self.language_registry.clone();
3268 let unstaged_diff = self.unstaged_diff();
3269 let uncommitted_diff = self.uncommitted_diff();
3270 let head = self.head_text.clone();
3271 let index = self.index_text.clone();
3272 let index_changed = self.index_changed;
3273 let head_changed = self.head_changed;
3274 let language_changed = self.language_changed;
3275 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3276 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3277 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3278 (None, None) => true,
3279 _ => false,
3280 };
3281
3282 let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
3283 .oid_diffs
3284 .iter()
3285 .filter_map(|(oid, weak)| {
3286 let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
3287 weak.upgrade().map(|diff| (*oid, diff, base_text))
3288 })
3289 .collect();
3290
3291 self.oid_diffs.retain(|oid, weak| {
3292 let alive = weak.upgrade().is_some();
3293 if !alive {
3294 if let Some(oid) = oid {
3295 self.oid_texts.remove(oid);
3296 }
3297 }
3298 alive
3299 });
3300 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3301 log::debug!(
3302 "start recalculating diffs for buffer {}",
3303 buffer.remote_id()
3304 );
3305
3306 let mut new_unstaged_diff = None;
3307 if let Some(unstaged_diff) = &unstaged_diff {
3308 new_unstaged_diff = Some(
3309 cx.update(|cx| {
3310 unstaged_diff.read(cx).update_diff(
3311 buffer.clone(),
3312 index,
3313 index_changed.then_some(false),
3314 language.clone(),
3315 cx,
3316 )
3317 })
3318 .await,
3319 );
3320 }
3321
3322 // Dropping BufferDiff can be expensive, so yield back to the event loop
3323 // for a bit
3324 yield_now().await;
3325
3326 let mut new_uncommitted_diff = None;
3327 if let Some(uncommitted_diff) = &uncommitted_diff {
3328 new_uncommitted_diff = if index_matches_head {
3329 new_unstaged_diff.clone()
3330 } else {
3331 Some(
3332 cx.update(|cx| {
3333 uncommitted_diff.read(cx).update_diff(
3334 buffer.clone(),
3335 head,
3336 head_changed.then_some(true),
3337 language.clone(),
3338 cx,
3339 )
3340 })
3341 .await,
3342 )
3343 }
3344 }
3345
3346 // Dropping BufferDiff can be expensive, so yield back to the event loop
3347 // for a bit
3348 yield_now().await;
3349
3350 let cancel = this.update(cx, |this, _| {
3351 // This checks whether all pending stage/unstage operations
3352 // have quiesced (i.e. both the corresponding write and the
3353 // read of that write have completed). If not, then we cancel
3354 // this recalculation attempt to avoid invalidating pending
3355 // state too quickly; another recalculation will come along
3356 // later and clear the pending state once the state of the index has settled.
3357 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3358 *this.recalculating_tx.borrow_mut() = false;
3359 true
3360 } else {
3361 false
3362 }
3363 })?;
3364 if cancel {
3365 log::debug!(
3366 concat!(
3367 "aborting recalculating diffs for buffer {}",
3368 "due to subsequent hunk operations",
3369 ),
3370 buffer.remote_id()
3371 );
3372 return Ok(());
3373 }
3374
3375 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3376 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3377 {
3378 let task = unstaged_diff.update(cx, |diff, cx| {
3379 // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
3380 // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
3381 // view multibuffers.
3382 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3383 });
3384 Some(task.await)
3385 } else {
3386 None
3387 };
3388
3389 yield_now().await;
3390
3391 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3392 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3393 {
3394 uncommitted_diff
3395 .update(cx, |diff, cx| {
3396 if language_changed {
3397 diff.language_changed(language.clone(), language_registry, cx);
3398 }
3399 diff.set_snapshot_with_secondary(
3400 new_uncommitted_diff,
3401 &buffer,
3402 unstaged_changed_range.flatten(),
3403 true,
3404 cx,
3405 )
3406 })
3407 .await;
3408 }
3409
3410 yield_now().await;
3411
3412 for (oid, oid_diff, base_text) in oid_diffs {
3413 let new_oid_diff = cx
3414 .update(|cx| {
3415 oid_diff.read(cx).update_diff(
3416 buffer.clone(),
3417 base_text,
3418 None,
3419 language.clone(),
3420 cx,
3421 )
3422 })
3423 .await;
3424
3425 oid_diff
3426 .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
3427 .await;
3428
3429 log::debug!(
3430 "finished recalculating oid diff for buffer {} oid {:?}",
3431 buffer.remote_id(),
3432 oid
3433 );
3434
3435 yield_now().await;
3436 }
3437
3438 log::debug!(
3439 "finished recalculating diffs for buffer {}",
3440 buffer.remote_id()
3441 );
3442
3443 if let Some(this) = this.upgrade() {
3444 this.update(cx, |this, _| {
3445 this.index_changed = false;
3446 this.head_changed = false;
3447 this.language_changed = false;
3448 *this.recalculating_tx.borrow_mut() = false;
3449 });
3450 }
3451
3452 Ok(())
3453 }));
3454 }
3455}
3456
3457fn make_remote_delegate(
3458 this: Entity<GitStore>,
3459 project_id: u64,
3460 repository_id: RepositoryId,
3461 askpass_id: u64,
3462 cx: &mut AsyncApp,
3463) -> AskPassDelegate {
3464 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3465 this.update(cx, |this, cx| {
3466 let Some((client, _)) = this.downstream_client() else {
3467 return;
3468 };
3469 let response = client.request(proto::AskPassRequest {
3470 project_id,
3471 repository_id: repository_id.to_proto(),
3472 askpass_id,
3473 prompt,
3474 });
3475 cx.spawn(async move |_, _| {
3476 let mut response = response.await?.response;
3477 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3478 .ok();
3479 response.zeroize();
3480 anyhow::Ok(())
3481 })
3482 .detach_and_log_err(cx);
3483 });
3484 })
3485}
3486
3487impl RepositoryId {
3488 pub fn to_proto(self) -> u64 {
3489 self.0
3490 }
3491
3492 pub fn from_proto(id: u64) -> Self {
3493 RepositoryId(id)
3494 }
3495}
3496
impl RepositorySnapshot {
    /// Creates a snapshot with no statuses, branch, head commit, or merge
    /// state, rooted at `work_directory_abs_path`. When
    /// `original_repo_abs_path` is `None`, the work directory itself is
    /// recorded as the original repository path.
    fn empty(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
    ) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            original_repo_abs_path: original_repo_abs_path
                .unwrap_or_else(|| work_directory_abs_path.clone()),
            work_directory_abs_path,
            branch: None,
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
            path_style,
        }
    }

    /// Builds the full `UpdateRepository` message sent when this repository is
    /// first shared downstream: every status entry is included and nothing is
    /// marked removed.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            // NOTE(review): the repository id doubles as its sole entry id in
            // the initial update — confirm against the proto consumer.
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
        }
    }

    /// Builds an incremental `UpdateRepository` message relative to `old`,
    /// computing added/changed statuses and removed paths with a two-pointer
    /// merge over the two sorted status sequences.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    // Both sequences are ordered by repo path, so comparing the
                    // front elements tells us whether a path was added,
                    // changed, or removed.
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Path only exists in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Path exists in both: only send it if the status changed.
                            if new_entry.status != old_entry.status {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Path only exists in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            // Incremental updates carry no entry ids (unlike `initial_update`).
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
        }
    }

    /// Iterates over clones of every file status entry in this snapshot.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// Returns the aggregate git summary over all status entries.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// Looks up the status entry recorded for `path`, if any.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// Converts an absolute path into a path relative to this repository's
    /// work directory, or `None` if it lies outside the work directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Converts a repo-relative path back into an absolute path.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            // Joining an absolute base with a relative repo path is assumed
            // infallible here — TODO confirm `PathStyle::join`'s contract.
            .unwrap()
            .into()
    }

    /// Shared helper for [`Self::abs_path_to_repo_path`]: strips the work
    /// directory prefix using the repository's path style.
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }

    /// Whether `repo_path` was conflicted at the time the merge heads last changed.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }

    /// Whether `repo_path` is conflicted, either per the recorded merge state
    /// or per its current git status.
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change = self
            .merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
3694
3695pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3696 proto::StashEntry {
3697 oid: entry.oid.as_bytes().to_vec(),
3698 message: entry.message.clone(),
3699 branch: entry.branch.clone(),
3700 index: entry.index as u64,
3701 timestamp: entry.timestamp,
3702 }
3703}
3704
3705pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3706 Ok(StashEntry {
3707 oid: Oid::from_bytes(&entry.oid)?,
3708 message: entry.message.clone(),
3709 index: entry.index as usize,
3710 branch: entry.branch.clone(),
3711 timestamp: entry.timestamp,
3712 })
3713}
3714
3715impl MergeDetails {
3716 async fn update(
3717 &mut self,
3718 backend: &Arc<dyn GitRepository>,
3719 current_conflicted_paths: Vec<RepoPath>,
3720 ) -> Result<bool> {
3721 log::debug!("load merge details");
3722 self.message = backend.merge_message().await.map(SharedString::from);
3723 let heads = backend
3724 .revparse_batch(vec![
3725 "MERGE_HEAD".into(),
3726 "CHERRY_PICK_HEAD".into(),
3727 "REBASE_HEAD".into(),
3728 "REVERT_HEAD".into(),
3729 "APPLY_HEAD".into(),
3730 ])
3731 .await
3732 .log_err()
3733 .unwrap_or_default()
3734 .into_iter()
3735 .map(|opt| opt.map(SharedString::from))
3736 .collect::<Vec<_>>();
3737
3738 let mut conflicts_changed = false;
3739
3740 // Record the merge state for newly conflicted paths
3741 for path in ¤t_conflicted_paths {
3742 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3743 conflicts_changed = true;
3744 self.merge_heads_by_conflicted_path
3745 .insert(path.clone(), heads.clone());
3746 }
3747 }
3748
3749 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3750 self.merge_heads_by_conflicted_path
3751 .retain(|path, old_merge_heads| {
3752 let keep = current_conflicted_paths.contains(path)
3753 || (old_merge_heads == &heads
3754 && old_merge_heads.iter().any(|head| head.is_some()));
3755 if !keep {
3756 conflicts_changed = true;
3757 }
3758 keep
3759 });
3760
3761 Ok(conflicts_changed)
3762 }
3763}
3764
3765impl Repository {
3766 pub fn snapshot(&self) -> RepositorySnapshot {
3767 self.snapshot.clone()
3768 }
3769
3770 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3771 self.pending_ops.iter().cloned()
3772 }
3773
3774 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3775 self.pending_ops.summary().clone()
3776 }
3777
3778 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3779 self.pending_ops
3780 .get(&PathKey(path.as_ref().clone()), ())
3781 .cloned()
3782 }
3783
    /// Constructs a repository backed by a local git checkout. The underlying
    /// repository state is opened asynchronously; both the git worker and any
    /// later callers share the same pending `state` future.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // Open the local repository off-thread; `.shared()` lets both the
        // worker below and the stored `repository_state` await the same result.
        // Errors are stringified so the shared future's output is cloneable.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        // Wrap the local state in the `RepositoryState` enum for storage.
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data when the branch changes, but
        // only after the initial scan (scan_id > 1) to avoid discarding data
        // during startup.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3849
3850 fn remote(
3851 id: RepositoryId,
3852 work_directory_abs_path: Arc<Path>,
3853 original_repo_abs_path: Option<Arc<Path>>,
3854 path_style: PathStyle,
3855 project_id: ProjectId,
3856 client: AnyProtoClient,
3857 git_store: WeakEntity<GitStore>,
3858 cx: &mut Context<Self>,
3859 ) -> Self {
3860 let snapshot = RepositorySnapshot::empty(
3861 id,
3862 work_directory_abs_path,
3863 original_repo_abs_path,
3864 path_style,
3865 );
3866 let repository_state = RemoteRepositoryState { project_id, client };
3867 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3868 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3869 Self {
3870 this: cx.weak_entity(),
3871 snapshot,
3872 commit_message_buffer: None,
3873 git_store,
3874 pending_ops: Default::default(),
3875 paths_needing_status_update: Default::default(),
3876 job_sender,
3877 repository_state,
3878 askpass_delegates: Default::default(),
3879 latest_askpass_id: 0,
3880 active_jobs: Default::default(),
3881 job_id: 0,
3882 initial_graph_data: Default::default(),
3883 commit_data: Default::default(),
3884 graph_commit_data_handler: GraphCommitHandlerState::Closed,
3885 }
3886 }
3887
3888 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3889 self.git_store.upgrade()
3890 }
3891
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, computes what changed, broadcasts the changes to
    /// any downstream client, and triggers diff recalculation per buffer.
    /// Keyed so that redundant reload requests coalesce into one job.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                // Only local repositories can read index/HEAD contents directly.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (main thread): collect, for each open buffer in this
                // repo, its repo path plus the currently-held base texts —
                // wrapped in Option so we only reload bases that are in use.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                // Skip buffers that belong to other repositories.
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Phase 2 (background): read the fresh index/HEAD texts from
                // git and classify what changed per buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // Pick the minimal DiffBasesChange variant so that
                            // unchanged bases are not rebroadcast or re-diffed.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (main thread): forward each change downstream (if
                // shared) and apply it to the buffer's diff state.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                // Translate the change into the wire-level mode.
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4051
4052 pub fn send_job<F, Fut, R>(
4053 &mut self,
4054 status: Option<SharedString>,
4055 job: F,
4056 ) -> oneshot::Receiver<R>
4057 where
4058 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
4059 Fut: Future<Output = R> + 'static,
4060 R: Send + 'static,
4061 {
4062 self.send_keyed_job(None, status, job)
4063 }
4064
    /// Enqueues `job` on the git worker queue with an optional coalescing
    /// `key` and optional `status` text. Registers the job in `active_jobs`
    /// while it runs (when `status` is set) and delivers its result through
    /// the returned receiver. Send failures (worker gone) and delivery
    /// failures (receiver dropped) are deliberately ignored.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Monotonic id used to track this job in `active_jobs`.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    // Construct the job future eagerly so it captures `state`,
                    // then drive it inside a spawned task that maintains the
                    // active-jobs bookkeeping around it.
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Remove the job from the active list even if it was
                        // never registered (remove of a missing key is a no-op).
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4114
4115 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4116 let Some(git_store) = self.git_store.upgrade() else {
4117 return;
4118 };
4119 let entity = cx.entity();
4120 git_store.update(cx, |git_store, cx| {
4121 let Some((&id, _)) = git_store
4122 .repositories
4123 .iter()
4124 .find(|(_, handle)| *handle == &entity)
4125 else {
4126 return;
4127 };
4128 git_store.active_repo_id = Some(id);
4129 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4130 });
4131 }
4132
4133 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4134 self.snapshot.status()
4135 }
4136
4137 pub fn cached_stash(&self) -> GitStash {
4138 self.snapshot.stash_entries.clone()
4139 }
4140
4141 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4142 let git_store = self.git_store.upgrade()?;
4143 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4144 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4145 let abs_path = SanitizedPath::new(&abs_path);
4146 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4147 Some(ProjectPath {
4148 worktree_id: worktree.read(cx).id(),
4149 path: relative_path,
4150 })
4151 }
4152
4153 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4154 let git_store = self.git_store.upgrade()?;
4155 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4156 let abs_path = worktree_store.absolutize(path, cx)?;
4157 self.snapshot.abs_path_to_repo_path(&abs_path)
4158 }
4159
4160 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4161 other
4162 .read(cx)
4163 .snapshot
4164 .work_directory_abs_path
4165 .starts_with(&self.snapshot.work_directory_abs_path)
4166 }
4167
    /// Opens (or returns the cached) buffer used for composing commit
    /// messages. For local repositories the buffer is created locally; for
    /// remote ones it is requested from the peer and replicated here, then
    /// assigned the "Git Commit" language when a registry is available.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Reuse the buffer if one was already opened for this repository.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait until the replicated buffer arrives from the peer.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache the buffer so subsequent calls return it directly.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4220
    /// Creates a new local commit-message buffer, optionally tagged with the
    /// "Git Commit" language, and memoizes it on the repository entity.
    fn open_local_commit_buffer(
        language_registry: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        cx.spawn(async move |repository, cx| {
            // Resolve the commit-message language up front, if we have a
            // registry to look it up in.
            let git_commit_language = match language_registry {
                Some(language_registry) => {
                    Some(language_registry.language_for_name("Git Commit").await?)
                }
                None => None,
            };
            let buffer = buffer_store
                .update(cx, |buffer_store, cx| {
                    buffer_store.create_buffer(git_commit_language, false, cx)
                })
                .await?;

            // Cache the buffer so open_commit_buffer returns it next time.
            repository.update(cx, |repository, _| {
                repository.commit_message_buffer = Some(buffer.clone());
            })?;
            Ok(buffer)
        })
    }
4245
    /// Restores `paths` to their state at `commit` (like
    /// `git checkout <commit> -- <paths>`), tracking the affected entries as
    /// pending "Reverted" operations while the job runs.
    ///
    /// Local repositories invoke the backend directly; remote ones proxy the
    /// request to the host over RPC.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            // Track every affected path so UI can show the in-flight revert.
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        // Status line shown while the job is running.
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4300
    /// Resets the repository's HEAD to `commit` with the given mode
    /// (soft keeps the index, mixed resets it). The result is delivered on
    /// the returned receiver once the queued git job completes.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            // Translate the local enum into its proto encoding.
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4334
    /// Fetches the details (sha, message, timestamp, author) of a single
    /// commit, locally or via RPC for remote repositories.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    // Rebuild the strongly-typed details from the proto fields.
                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4362
    /// Loads the full diff for `commit` (old/new text per file), either from
    /// the local backend or over RPC from the host.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        // Fallible conversion: decoding any file path can
                        // fail, in which case the whole diff load fails.
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4398
    /// Loads the commit history for `path`. Convenience wrapper around
    /// [`Self::file_history_paginated`] with no offset and no entry limit.
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4405
    /// Loads a page of the commit history for `path`, skipping the first
    /// `skip` entries and returning at most `limit` entries (all remaining
    /// entries when `limit` is `None`).
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            // Proto uses fixed-width integers for pagination.
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        // The host may normalize the path; trust its answer.
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4447
    /// Returns the already-fetched graph data for this (source, order) pair,
    /// if any. Unlike [`Self::graph_data`], this never starts a fetch.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4455
    /// Returns a window of commit-graph data for the given log source/order,
    /// lazily spawning the fetch task the first time each (source, order)
    /// combination is requested.
    ///
    /// While the fetch is still running, the returned slice reflects whatever
    /// has streamed in so far and `is_loading` is true. `range` is clamped to
    /// the currently-available data, so callers may over-ask safely.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // First request for this (source, order): kick off the
                // streaming fetch. Results are pushed into this map entry by
                // local_git_graph_data as they arrive.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record any failure on the entry so the UI can surface it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to what has arrived so far. `max_start`
        // uses saturating_sub so an empty data set yields the empty slice.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4525
    /// Streams initial commit-graph data from the local backend into the
    /// repository's `initial_graph_data` map.
    ///
    /// The backend pushes batches of commits through an unbounded channel
    /// from a background task; each batch is appended to the map entry on the
    /// foreground, emitting a `GitGraphEvent::CountUpdated` per commit.
    /// Returns the backend task's final result (as a displayable error
    /// string on failure).
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // Producer: runs git traversal off the main thread, sending batches
        // through request_tx. Dropping the sender ends the loop below.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Consumer: append each batch to the entry and keep the oid→index
        // lookup in sync with the growing commit list.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                // The entry is created (with this task inside it) before the
                // task runs, so a vacant entry indicates a logic error.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Propagate any traversal error from the producer task.
        task.await?;
        Ok(())
    }
4580
    /// Returns the cached state for `sha`'s commit data, requesting a load
    /// if it hasn't been fetched yet.
    ///
    /// Requests go through the (lazily opened) graph-commit-data handler. If
    /// the handler is currently closed we open it here but do not enqueue the
    /// request; a subsequent call for the same sha will send it. Callers
    /// therefore see `Loading` until the data arrives and `cx.notify()` fires.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only record Loading if the request was actually queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                // Handler is mid-startup; retry on a later call.
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4601
    /// Starts the background handler that serves commit-data requests for the
    /// git graph.
    ///
    /// Two tasks are spawned: a background task that owns the backend's
    /// commit-data reader and answers requests from `request_tx`, and a
    /// foreground task that writes each answer into `commit_data` and
    /// notifies observers. The background task shuts itself down after ten
    /// seconds without a request, returning the handler to the `Closed`
    /// state so it can be reopened on demand.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Bounded result channel applies backpressure to the reader;
        // unbounded request channel lets the UI enqueue freely.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Reader finished (idle timeout, error, or entity dropped):
            // mark the handler closed so the next fetch reopens it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            // Commit-data reading is local-only; remote repositories and
            // failed state resolution simply never serve requests.
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Fresh 10s idle timer per iteration; it only wins the race
                // if no request arrives in that window.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Failed reads are logged and skipped; the
                                // entry stays in the Loading state.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing result_tx ends the foreground loop, which then flips
            // the handler state back to Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4692
    /// Returns the project's buffer store, or `None` if the owning
    /// `GitStore` has been dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4696
    /// Kicks off saves for every open, dirty buffer among `entries` whose
    /// file still exists on disk, returning the save tasks so callers can
    /// await them before touching the git index.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    // Paths outside any worktree are skipped silently.
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    // Only save buffers that are open, backed by an existing
                    // file, and actually have unsaved edits.
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
4723
    /// Stages the given paths (equivalent to `git add`). See
    /// [`Self::stage_or_unstage_entries`] for details.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4731
    /// Unstages the given paths (equivalent to `git reset -- <paths>`). See
    /// [`Self::stage_or_unstage_entries`] for details.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4739
    /// Stages (`stage == true`) or unstages the given paths.
    ///
    /// Order of operations: (1) save any dirty open buffers for the affected
    /// paths, (2) optimistically flip all hunks in each path's uncommitted
    /// diff and bump its hunk-staging operation counter, (3) perform the real
    /// index write (local backend or RPC), then (4) on success record the
    /// counters observed at write time, or on failure clear the optimistic
    /// pending hunks. Jobs are keyed on the path set so concurrent index
    /// writes for the same paths are serialized.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Buffers must hit disk before the index write, or git would
                // stage stale contents.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistic UI update: flip all hunks in each
                            // affected uncommitted diff, remembering each
                            // diff-state's operation count for step (4).
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // The actual index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic update with the write:
                            // commit the observed counters on success, roll
                            // back pending hunks on failure.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
4924
4925 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4926 let to_stage = self
4927 .cached_status()
4928 .filter_map(|entry| {
4929 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4930 if ops.staging() || ops.staged() {
4931 None
4932 } else {
4933 Some(entry.repo_path)
4934 }
4935 } else if entry.status.staging().is_fully_staged() {
4936 None
4937 } else {
4938 Some(entry.repo_path)
4939 }
4940 })
4941 .collect();
4942 self.stage_or_unstage_entries(true, to_stage, cx)
4943 }
4944
4945 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4946 let to_unstage = self
4947 .cached_status()
4948 .filter_map(|entry| {
4949 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4950 if !ops.staging() && !ops.staged() {
4951 None
4952 } else {
4953 Some(entry.repo_path)
4954 }
4955 } else if entry.status.staging().is_fully_unstaged() {
4956 None
4957 } else {
4958 Some(entry.repo_path)
4959 }
4960 })
4961 .collect();
4962 self.stage_or_unstage_entries(false, to_unstage, cx)
4963 }
4964
4965 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4966 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4967
4968 self.stash_entries(to_stash, cx)
4969 }
4970
    /// Stashes the given paths (like `git stash push -- <paths>`), either via
    /// the local backend or over RPC for remote repositories.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?`: entity update; second: the job's own result.
            .await??;
            Ok(())
        })
    }
5007
    /// Pops the stash entry at `index` (or the latest entry when `None`),
    /// applying it to the working tree and removing it from the stash.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?`: entity update; second: the job's own result.
            .await??;
            Ok(())
        })
    }
5041
    /// Applies the stash entry at `index` (or the latest entry when `None`)
    /// to the working tree without removing it from the stash.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?`: entity update; second: the job's own result.
            .await??;
            Ok(())
        })
    }
5075
5076 pub fn stash_drop(
5077 &mut self,
5078 index: Option<usize>,
5079 cx: &mut Context<Self>,
5080 ) -> oneshot::Receiver<anyhow::Result<()>> {
5081 let id = self.id;
5082 let updates_tx = self
5083 .git_store()
5084 .and_then(|git_store| match &git_store.read(cx).state {
5085 GitStoreState::Local { downstream, .. } => downstream
5086 .as_ref()
5087 .map(|downstream| downstream.updates_tx.clone()),
5088 _ => None,
5089 });
5090 let this = cx.weak_entity();
5091 self.send_job(None, move |git_repo, mut cx| async move {
5092 match git_repo {
5093 RepositoryState::Local(LocalRepositoryState {
5094 backend,
5095 environment,
5096 ..
5097 }) => {
5098 // TODO would be nice to not have to do this manually
5099 let result = backend.stash_drop(index, environment).await;
5100 if result.is_ok()
5101 && let Ok(stash_entries) = backend.stash_entries().await
5102 {
5103 let snapshot = this.update(&mut cx, |this, cx| {
5104 this.snapshot.stash_entries = stash_entries;
5105 cx.emit(RepositoryEvent::StashEntriesChanged);
5106 this.snapshot.clone()
5107 })?;
5108 if let Some(updates_tx) = updates_tx {
5109 updates_tx
5110 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5111 .ok();
5112 }
5113 }
5114
5115 result
5116 }
5117 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5118 client
5119 .request(proto::StashDrop {
5120 project_id: project_id.0,
5121 repository_id: id.to_proto(),
5122 stash_index: index.map(|i| i as u64),
5123 })
5124 .await
5125 .context("sending stash pop request")?;
5126 Ok(())
5127 }
5128 }
5129 })
5130 }
5131
    /// Runs the given git hook (e.g. pre-commit), locally via the backend or
    /// over RPC for remote repositories. The job's status line shows
    /// "git hook <name>" while it runs.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5158
    /// Creates a commit with the given message and options, running the
    /// pre-commit hook first.
    ///
    /// The hook job is enqueued before the commit job; the commit job awaits
    /// the hook's result and aborts if it fails. For remote repositories the
    /// askpass delegate is registered under a fresh id so the host can relay
    /// credential prompts back to this client, and is removed when the
    /// request finishes.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook; its receiver is awaited inside the
        // commit job below so a failing hook blocks the commit.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Ensure the delegate is removed even if the request fails.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5213
    /// Fetches from a remote with the given options, returning the remote
    /// command's stdout/stderr. For remote repositories the askpass delegate
    /// is registered so the host can relay credential prompts.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Ensure the delegate is removed even if the request fails.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5255
    /// Pushes `branch` to `remote_branch` on `remote`, optionally with
    /// `--set-upstream` or `--force-with-lease`.
    ///
    /// After a successful local push, the branch list is re-read so the
    /// snapshot's head branch (including upstream tracking) is refreshed,
    /// `RepositoryEvent::BranchChanged` is emitted, and the snapshot is
    /// forwarded downstream to collaborators.
    pub fn push(
        &mut self,
        branch: SharedString,
        remote_branch: SharedString,
        remote: SharedString,
        options: Option<PushOptions>,
        askpass: AskPassDelegate,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // NOTE(review): `args` carries its own leading space, and the format
        // string below also inserts one, so the status line shows a double
        // space ("git push  origin ...") — display-only; verify intent.
        let args = options
            .map(|option| match option {
                PushOptions::SetUpstream => " --set-upstream",
                PushOptions::Force => " --force-with-lease",
            })
            .unwrap_or("");

        // Channel for pushing the refreshed snapshot downstream; only present
        // when we are the local host with active downstream clients.
        let updates_tx = self
            .git_store()
            .and_then(|git_store| match &git_store.read(cx).state {
                GitStoreState::Local { downstream, .. } => downstream
                    .as_ref()
                    .map(|downstream| downstream.updates_tx.clone()),
                _ => None,
            });

        let this = cx.weak_entity();
        self.send_job(
            Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
            move |git_repo, mut cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => {
                        let result = backend
                            .push(
                                branch.to_string(),
                                remote_branch.to_string(),
                                remote.to_string(),
                                options,
                                askpass,
                                environment.clone(),
                                cx.clone(),
                            )
                            .await;
                        // TODO would be nice to not have to do this manually
                        if result.is_ok() {
                            // Refresh the head branch so upstream tracking
                            // reflects the push immediately.
                            let branches = backend.branches().await?;
                            let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after scan is {branch:?}");
                            let snapshot = this.update(&mut cx, |this, cx| {
                                this.snapshot.branch = branch;
                                cx.emit(RepositoryEvent::BranchChanged);
                                this.snapshot.clone()
                            })?;
                            if let Some(updates_tx) = updates_tx {
                                updates_tx
                                    .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                                    .ok();
                            }
                        }
                        result
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        // Ensure the delegate is removed even if the request fails.
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Push {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_branch_name: remote_branch.to_string(),
                                remote_name: remote.to_string(),
                                options: options.map(|options| match options {
                                    PushOptions::Force => proto::push::PushOptions::Force,
                                    PushOptions::SetUpstream => {
                                        proto::push::PushOptions::SetUpstream
                                    }
                                }
                                    as i32),
                            })
                            .await?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }
5357
5358 pub fn pull(
5359 &mut self,
5360 branch: Option<SharedString>,
5361 remote: SharedString,
5362 rebase: bool,
5363 askpass: AskPassDelegate,
5364 _cx: &mut App,
5365 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5366 let askpass_delegates = self.askpass_delegates.clone();
5367 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5368 let id = self.id;
5369
5370 let mut status = "git pull".to_string();
5371 if rebase {
5372 status.push_str(" --rebase");
5373 }
5374 status.push_str(&format!(" {}", remote));
5375 if let Some(b) = &branch {
5376 status.push_str(&format!(" {}", b));
5377 }
5378
5379 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5380 match git_repo {
5381 RepositoryState::Local(LocalRepositoryState {
5382 backend,
5383 environment,
5384 ..
5385 }) => {
5386 backend
5387 .pull(
5388 branch.as_ref().map(|b| b.to_string()),
5389 remote.to_string(),
5390 rebase,
5391 askpass,
5392 environment.clone(),
5393 cx,
5394 )
5395 .await
5396 }
5397 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5398 askpass_delegates.lock().insert(askpass_id, askpass);
5399 let _defer = util::defer(|| {
5400 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5401 debug_assert!(askpass_delegate.is_some());
5402 });
5403 let response = client
5404 .request(proto::Pull {
5405 project_id: project_id.0,
5406 repository_id: id.to_proto(),
5407 askpass_id,
5408 rebase,
5409 branch_name: branch.as_ref().map(|b| b.to_string()),
5410 remote_name: remote.to_string(),
5411 })
5412 .await?;
5413
5414 Ok(RemoteCommandOutput {
5415 stdout: response.stdout,
5416 stderr: response.stderr,
5417 })
5418 }
5419 }
5420 })
5421 }
5422
    /// Queues a job that writes `content` as the staged (index) text for `path`,
    /// or removes the index entry when `content` is `None`.
    ///
    /// The job is keyed on `GitJobKey::WriteIndex(path)`; based on the worker's
    /// dedup logic, a queued write for the same path appears to supersede an
    /// earlier unstarted one. If `hunk_staging_operation_count` is provided, it
    /// is recorded on the buffer's diff state after the write completes.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit from the on-disk file;
                        // missing files or metadata errors default to non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    // Record how many hunk-staging operations had been issued at the
                    // time of this write, on the diff state of the affected buffer.
                    // The inner closure returns Option and silently does nothing if
                    // the buffer or its diff state cannot be found.
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5501
5502 pub fn create_remote(
5503 &mut self,
5504 remote_name: String,
5505 remote_url: String,
5506 ) -> oneshot::Receiver<Result<()>> {
5507 let id = self.id;
5508 self.send_job(
5509 Some(format!("git remote add {remote_name} {remote_url}").into()),
5510 move |repo, _cx| async move {
5511 match repo {
5512 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5513 backend.create_remote(remote_name, remote_url).await
5514 }
5515 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5516 client
5517 .request(proto::GitCreateRemote {
5518 project_id: project_id.0,
5519 repository_id: id.to_proto(),
5520 remote_name,
5521 remote_url,
5522 })
5523 .await?;
5524
5525 Ok(())
5526 }
5527 }
5528 },
5529 )
5530 }
5531
5532 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5533 let id = self.id;
5534 self.send_job(
5535 Some(format!("git remove remote {remote_name}").into()),
5536 move |repo, _cx| async move {
5537 match repo {
5538 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5539 backend.remove_remote(remote_name).await
5540 }
5541 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5542 client
5543 .request(proto::GitRemoveRemote {
5544 project_id: project_id.0,
5545 repository_id: id.to_proto(),
5546 remote_name,
5547 })
5548 .await?;
5549
5550 Ok(())
5551 }
5552 }
5553 },
5554 )
5555 }
5556
5557 pub fn get_remotes(
5558 &mut self,
5559 branch_name: Option<String>,
5560 is_push: bool,
5561 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5562 let id = self.id;
5563 self.send_job(None, move |repo, _cx| async move {
5564 match repo {
5565 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5566 let remote = if let Some(branch_name) = branch_name {
5567 if is_push {
5568 backend.get_push_remote(branch_name).await?
5569 } else {
5570 backend.get_branch_remote(branch_name).await?
5571 }
5572 } else {
5573 None
5574 };
5575
5576 match remote {
5577 Some(remote) => Ok(vec![remote]),
5578 None => backend.get_all_remotes().await,
5579 }
5580 }
5581 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5582 let response = client
5583 .request(proto::GetRemotes {
5584 project_id: project_id.0,
5585 repository_id: id.to_proto(),
5586 branch_name,
5587 is_push,
5588 })
5589 .await?;
5590
5591 let remotes = response
5592 .remotes
5593 .into_iter()
5594 .map(|remotes| Remote {
5595 name: remotes.name.into(),
5596 })
5597 .collect();
5598
5599 Ok(remotes)
5600 }
5601 }
5602 })
5603 }
5604
5605 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5606 let id = self.id;
5607 self.send_job(None, move |repo, _| async move {
5608 match repo {
5609 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5610 backend.branches().await
5611 }
5612 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5613 let response = client
5614 .request(proto::GitGetBranches {
5615 project_id: project_id.0,
5616 repository_id: id.to_proto(),
5617 })
5618 .await?;
5619
5620 let branches = response
5621 .branches
5622 .into_iter()
5623 .map(|branch| proto_to_branch(&branch))
5624 .collect();
5625
5626 Ok(branches)
5627 }
5628 }
5629 })
5630 }
5631
5632 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5633 let id = self.id;
5634 self.send_job(None, move |repo, _| async move {
5635 match repo {
5636 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5637 backend.worktrees().await
5638 }
5639 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5640 let response = client
5641 .request(proto::GitGetWorktrees {
5642 project_id: project_id.0,
5643 repository_id: id.to_proto(),
5644 })
5645 .await?;
5646
5647 let worktrees = response
5648 .worktrees
5649 .into_iter()
5650 .map(|worktree| proto_to_worktree(&worktree))
5651 .collect();
5652
5653 Ok(worktrees)
5654 }
5655 }
5656 })
5657 }
5658
5659 pub fn create_worktree(
5660 &mut self,
5661 name: String,
5662 directory: PathBuf,
5663 commit: Option<String>,
5664 ) -> oneshot::Receiver<Result<()>> {
5665 let id = self.id;
5666 self.send_job(
5667 Some("git worktree add".into()),
5668 move |repo, _cx| async move {
5669 match repo {
5670 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5671 backend.create_worktree(name, directory, commit).await
5672 }
5673 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5674 client
5675 .request(proto::GitCreateWorktree {
5676 project_id: project_id.0,
5677 repository_id: id.to_proto(),
5678 name,
5679 directory: directory.to_string_lossy().to_string(),
5680 commit,
5681 })
5682 .await?;
5683
5684 Ok(())
5685 }
5686 }
5687 },
5688 )
5689 }
5690
5691 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5692 self.send_job(
5693 Some("git worktree remove".into()),
5694 move |repo, _cx| async move {
5695 match repo {
5696 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5697 backend.remove_worktree(path, force).await
5698 }
5699 RepositoryState::Remote(_) => {
5700 anyhow::bail!(
5701 "Removing worktrees on remote repositories is not yet supported"
5702 )
5703 }
5704 }
5705 },
5706 )
5707 }
5708
5709 pub fn default_branch(
5710 &mut self,
5711 include_remote_name: bool,
5712 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5713 let id = self.id;
5714 self.send_job(None, move |repo, _| async move {
5715 match repo {
5716 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5717 backend.default_branch(include_remote_name).await
5718 }
5719 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5720 let response = client
5721 .request(proto::GetDefaultBranch {
5722 project_id: project_id.0,
5723 repository_id: id.to_proto(),
5724 })
5725 .await?;
5726
5727 anyhow::Ok(response.branch.map(SharedString::from))
5728 }
5729 }
5730 })
5731 }
5732
    /// Computes a tree-level diff (per-path add/modify/delete statuses) for the
    /// given `diff_type`, locally via the backend or remotely via RPC.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Translate proto entries back into TreeDiff statuses.
                    // Entries with a missing/invalid oid or an unparseable path
                    // are logged and dropped rather than failing the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
5792
5793 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5794 let id = self.id;
5795 self.send_job(None, move |repo, _cx| async move {
5796 match repo {
5797 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5798 backend.diff(diff_type).await
5799 }
5800 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5801 let (proto_diff_type, merge_base_ref) = match &diff_type {
5802 DiffType::HeadToIndex => {
5803 (proto::git_diff::DiffType::HeadToIndex.into(), None)
5804 }
5805 DiffType::HeadToWorktree => {
5806 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
5807 }
5808 DiffType::MergeBase { base_ref } => (
5809 proto::git_diff::DiffType::MergeBase.into(),
5810 Some(base_ref.to_string()),
5811 ),
5812 };
5813 let response = client
5814 .request(proto::GitDiff {
5815 project_id: project_id.0,
5816 repository_id: id.to_proto(),
5817 diff_type: proto_diff_type,
5818 merge_base_ref,
5819 })
5820 .await?;
5821
5822 Ok(response.diff)
5823 }
5824 }
5825 })
5826 }
5827
5828 /// Fetches per-line diff statistics (additions/deletions) via `git diff --numstat`.
5829 pub fn diff_stat(
5830 &mut self,
5831 diff_type: DiffType,
5832 _cx: &App,
5833 ) -> oneshot::Receiver<
5834 Result<collections::HashMap<git::repository::RepoPath, git::status::DiffStat>>,
5835 > {
5836 let id = self.id;
5837 self.send_job(None, move |repo, _cx| async move {
5838 match repo {
5839 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5840 backend.diff_stat(diff_type).await
5841 }
5842 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5843 let (proto_diff_type, merge_base_ref) = match &diff_type {
5844 DiffType::HeadToIndex => {
5845 (proto::git_diff_stat::DiffType::HeadToIndex.into(), None)
5846 }
5847 DiffType::HeadToWorktree => {
5848 (proto::git_diff_stat::DiffType::HeadToWorktree.into(), None)
5849 }
5850 DiffType::MergeBase { base_ref } => (
5851 proto::git_diff_stat::DiffType::MergeBase.into(),
5852 Some(base_ref.to_string()),
5853 ),
5854 };
5855 let response = client
5856 .request(proto::GitDiffStat {
5857 project_id: project_id.0,
5858 repository_id: id.to_proto(),
5859 diff_type: proto_diff_type,
5860 merge_base_ref,
5861 })
5862 .await?;
5863
5864 let stats = response
5865 .entries
5866 .into_iter()
5867 .filter_map(|entry| {
5868 let path = RepoPath::from_proto(&entry.path).log_err()?;
5869 Some((
5870 path,
5871 git::status::DiffStat {
5872 added: entry.added,
5873 deleted: entry.deleted,
5874 },
5875 ))
5876 })
5877 .collect();
5878
5879 Ok(stats)
5880 }
5881 }
5882 })
5883 }
5884
    /// Enqueues a job that creates (and switches to) branch `branch_name`,
    /// optionally based on `base_branch` (equivalent to `git switch -c`).
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): proto::GitCreateBranch carries no base_branch
                    // field, so for remote repositories `base_branch` is silently
                    // dropped and the branch is presumably created from HEAD —
                    // confirm whether the proto should be extended.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5915
5916 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5917 let id = self.id;
5918 self.send_job(
5919 Some(format!("git switch {branch_name}").into()),
5920 move |repo, _cx| async move {
5921 match repo {
5922 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5923 backend.change_branch(branch_name).await
5924 }
5925 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5926 client
5927 .request(proto::GitChangeBranch {
5928 project_id: project_id.0,
5929 repository_id: id.to_proto(),
5930 branch_name,
5931 })
5932 .await?;
5933
5934 Ok(())
5935 }
5936 }
5937 },
5938 )
5939 }
5940
5941 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5942 let id = self.id;
5943 self.send_job(
5944 Some(format!("git branch -d {branch_name}").into()),
5945 move |repo, _cx| async move {
5946 match repo {
5947 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5948 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5949 client
5950 .request(proto::GitDeleteBranch {
5951 project_id: project_id.0,
5952 repository_id: id.to_proto(),
5953 branch_name,
5954 })
5955 .await?;
5956
5957 Ok(())
5958 }
5959 }
5960 },
5961 )
5962 }
5963
5964 pub fn rename_branch(
5965 &mut self,
5966 branch: String,
5967 new_name: String,
5968 ) -> oneshot::Receiver<Result<()>> {
5969 let id = self.id;
5970 self.send_job(
5971 Some(format!("git branch -m {branch} {new_name}").into()),
5972 move |repo, _cx| async move {
5973 match repo {
5974 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5975 backend.rename_branch(branch, new_name).await
5976 }
5977 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5978 client
5979 .request(proto::GitRenameBranch {
5980 project_id: project_id.0,
5981 repository_id: id.to_proto(),
5982 branch,
5983 new_name,
5984 })
5985 .await?;
5986
5987 Ok(())
5988 }
5989 }
5990 },
5991 )
5992 }
5993
5994 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5995 let id = self.id;
5996 self.send_job(None, move |repo, _cx| async move {
5997 match repo {
5998 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5999 backend.check_for_pushed_commit().await
6000 }
6001 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6002 let response = client
6003 .request(proto::CheckForPushedCommits {
6004 project_id: project_id.0,
6005 repository_id: id.to_proto(),
6006 })
6007 .await?;
6008
6009 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6010
6011 Ok(branches)
6012 }
6013 }
6014 })
6015 }
6016
6017 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6018 self.send_job(None, |repo, _cx| async move {
6019 match repo {
6020 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6021 backend.checkpoint().await
6022 }
6023 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6024 }
6025 })
6026 }
6027
6028 pub fn restore_checkpoint(
6029 &mut self,
6030 checkpoint: GitRepositoryCheckpoint,
6031 ) -> oneshot::Receiver<Result<()>> {
6032 self.send_job(None, move |repo, _cx| async move {
6033 match repo {
6034 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6035 backend.restore_checkpoint(checkpoint).await
6036 }
6037 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6038 }
6039 })
6040 }
6041
    /// Applies an upstream `proto::UpdateRepository` message to the local
    /// snapshot, emitting the corresponding `RepositoryEvent`s for any fields
    /// that actually changed (branch/head, stash entries, statuses/conflicts).
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Compare before overwriting so BranchChanged only fires on a real change.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Entries that fail proto conversion are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Build a single edit batch: removals first, then upserts; paths that
        // fail to parse are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());
        // Only the final message of an update batch advances the scan id.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6115
6116 pub fn compare_checkpoints(
6117 &mut self,
6118 left: GitRepositoryCheckpoint,
6119 right: GitRepositoryCheckpoint,
6120 ) -> oneshot::Receiver<Result<bool>> {
6121 self.send_job(None, move |repo, _cx| async move {
6122 match repo {
6123 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6124 backend.compare_checkpoints(left, right).await
6125 }
6126 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6127 }
6128 })
6129 }
6130
6131 pub fn diff_checkpoints(
6132 &mut self,
6133 base_checkpoint: GitRepositoryCheckpoint,
6134 target_checkpoint: GitRepositoryCheckpoint,
6135 ) -> oneshot::Receiver<Result<String>> {
6136 self.send_job(None, move |repo, _cx| async move {
6137 match repo {
6138 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6139 backend
6140 .diff_checkpoints(base_checkpoint, target_checkpoint)
6141 .await
6142 }
6143 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6144 }
6145 })
6146 }
6147
    /// Drops all pending ops that are no longer running, keeping only paths
    /// that still have at least one running op, and emits
    /// `PendingOpsChanged` when the set changed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                // Retain only the still-running ops for this path.
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event payload is the pre-update set of ops
            // (cloned before `self.pending_ops` is replaced below) — confirm
            // subscribers expect the old value rather than `updated`.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6173
    /// Schedules a full git status rescan as a keyed job
    /// (`GitJobKey::ReloadGitState`), so queued rescans coalesce. When the
    /// scan completes, the snapshot is replaced, events are emitted, and the
    /// new snapshot is forwarded downstream via `updates_tx` if provided.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans can only be computed against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                // A full scan supersedes any queued partial status refreshes.
                let compute_snapshot = this.update(&mut cx, |this, _| {
                    this.paths_needing_status_update.clear();
                    compute_snapshot(
                        this.id,
                        this.work_directory_abs_path.clone(),
                        this.snapshot.clone(),
                        backend.clone(),
                    )
                });
                // Run the (potentially expensive) snapshot computation off the
                // main thread.
                let (snapshot, events) = cx.background_spawn(compute_snapshot).await?;
                this.update(&mut cx, |this, cx| {
                    this.snapshot = snapshot.clone();
                    this.clear_pending_ops(cx);
                    for event in events {
                        cx.emit(event);
                    }
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6218
    /// Spawns the serial worker loop that executes git jobs against a local
    /// repository, returning the sender used to enqueue jobs.
    ///
    /// Jobs run one at a time in FIFO order, except that a keyed job is
    /// skipped when a later job with the same key is already queued (only the
    /// newest job per key runs).
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // processing any jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so keyed-job dedup below can
                // see all pending work.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a newer job with the same key is
                    // still queued — the newer one supersedes it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // Channel closed: no more jobs will ever arrive.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6264
    /// Spawns the serial worker loop that executes git jobs against a remote
    /// repository (via RPC), returning the sender used to enqueue jobs.
    ///
    /// Same scheduling as `spawn_local_git_worker`: FIFO, one job at a time,
    /// and a keyed job is skipped when a newer job with the same key is queued.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so keyed-job dedup below can
                // see all pending work.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a newer job with the same key is
                    // still queued — the newer one supersedes it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // Channel closed: no more jobs will ever arrive.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6300
6301 fn load_staged_text(
6302 &mut self,
6303 buffer_id: BufferId,
6304 repo_path: RepoPath,
6305 cx: &App,
6306 ) -> Task<Result<Option<String>>> {
6307 let rx = self.send_job(None, move |state, _| async move {
6308 match state {
6309 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6310 anyhow::Ok(backend.load_index_text(repo_path).await)
6311 }
6312 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6313 let response = client
6314 .request(proto::OpenUnstagedDiff {
6315 project_id: project_id.to_proto(),
6316 buffer_id: buffer_id.to_proto(),
6317 })
6318 .await?;
6319 Ok(response.staged_text)
6320 }
6321 }
6322 });
6323 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6324 }
6325
    /// Loads the committed (HEAD) and staged (index) texts for `repo_path`,
    /// collapsing them into a single `DiffBasesChange::SetBoth` when they are
    /// identical so downstream diff state can share one base buffer.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // If index and HEAD agree, a single shared base suffices.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The remote side performs the same index-vs-HEAD comparison
                    // and reports which case applies via `mode`.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6371
6372 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6373 let repository_id = self.snapshot.id;
6374 let rx = self.send_job(None, move |state, _| async move {
6375 match state {
6376 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6377 backend.load_blob_content(oid).await
6378 }
6379 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6380 let response = client
6381 .request(proto::GetBlobContent {
6382 project_id: project_id.to_proto(),
6383 repository_id: repository_id.0,
6384 oid: oid.to_string(),
6385 })
6386 .await?;
6387 Ok(response.content)
6388 }
6389 }
6390 });
6391 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6392 }
6393
    /// Records that `paths` may have changed status and schedules a keyed
    /// refresh job (`GitJobKey::RefreshStatuses`) that re-queries git for just
    /// those paths, applying only the entries whose status actually differs
    /// from the previous snapshot. The updated snapshot is forwarded
    /// downstream via `updates_tx` if provided.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of all accumulated path batches; a coalesced
                // job may service requests from several `paths_changed` calls.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten the batches into a deduplicated, ordered set.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let statuses = backend
                            .status(&changed_paths.iter().cloned().collect::<Vec<_>>())
                            .await?;
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Upsert entries whose reported status differs from the
                        // previous snapshot; paths git reported on are removed
                        // from `changed_paths` so only vanished entries remain.
                        for (repo_path, status) in &*statuses.entries {
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| entry.status == *status)
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        // Remaining paths no longer have a git status: remove any
                        // stale entries for them from the previous snapshot.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6484
6485 /// currently running git command and when it started
6486 pub fn current_job(&self) -> Option<JobInfo> {
6487 self.active_jobs.values().next().cloned()
6488 }
6489
    /// Enqueues a no-op job and returns its completion receiver: since jobs run
    /// serially, the receiver resolves once every previously-enqueued job has
    /// finished, acting as a barrier over the repository's job queue.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
6493
    /// Runs `f` while tracking a pending op (with `git_status`) for each of
    /// `paths`, then records each op's terminal status: `Finished` on success,
    /// `Skipped` when the job was canceled, `Error` otherwise.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        // Register a Running op per path before the work starts.
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // Cancellation is not an error from the caller's perspective.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Write the terminal status back onto each tracked op.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6533
6534 fn new_pending_ops_for_paths(
6535 &mut self,
6536 paths: Vec<RepoPath>,
6537 git_status: pending_op::GitStatus,
6538 ) -> Vec<(PendingOpId, RepoPath)> {
6539 let mut edits = Vec::with_capacity(paths.len());
6540 let mut ids = Vec::with_capacity(paths.len());
6541 for path in paths {
6542 let mut ops = self
6543 .pending_ops
6544 .get(&PathKey(path.as_ref().clone()), ())
6545 .cloned()
6546 .unwrap_or_else(|| PendingOps::new(&path));
6547 let id = ops.max_id() + 1;
6548 ops.ops.push(PendingOp {
6549 id,
6550 git_status,
6551 job_status: pending_op::JobStatus::Running,
6552 });
6553 edits.push(sum_tree::Edit::Insert(ops));
6554 ids.push((id, path));
6555 }
6556 self.pending_ops.edit(edits, ());
6557 ids
6558 }
6559 pub fn default_remote_url(&self) -> Option<String> {
6560 self.remote_upstream_url
6561 .clone()
6562 .or(self.remote_origin_url.clone())
6563 }
6564}
6565
6566fn get_permalink_in_rust_registry_src(
6567 provider_registry: Arc<GitHostingProviderRegistry>,
6568 path: PathBuf,
6569 selection: Range<u32>,
6570) -> Result<url::Url> {
6571 #[derive(Deserialize)]
6572 struct CargoVcsGit {
6573 sha1: String,
6574 }
6575
6576 #[derive(Deserialize)]
6577 struct CargoVcsInfo {
6578 git: CargoVcsGit,
6579 path_in_vcs: String,
6580 }
6581
6582 #[derive(Deserialize)]
6583 struct CargoPackage {
6584 repository: String,
6585 }
6586
6587 #[derive(Deserialize)]
6588 struct CargoToml {
6589 package: CargoPackage,
6590 }
6591
6592 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
6593 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
6594 Some((dir, json))
6595 }) else {
6596 bail!("No .cargo_vcs_info.json found in parent directories")
6597 };
6598 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
6599 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
6600 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
6601 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
6602 .context("parsing package.repository field of manifest")?;
6603 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
6604 let permalink = provider.build_permalink(
6605 remote,
6606 BuildPermalinkParams::new(
6607 &cargo_vcs_info.git.sha1,
6608 &RepoPath::from_rel_path(
6609 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6610 ),
6611 Some(selection),
6612 ),
6613 );
6614 Ok(permalink)
6615}
6616
6617fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6618 let Some(blame) = blame else {
6619 return proto::BlameBufferResponse {
6620 blame_response: None,
6621 };
6622 };
6623
6624 let entries = blame
6625 .entries
6626 .into_iter()
6627 .map(|entry| proto::BlameEntry {
6628 sha: entry.sha.as_bytes().into(),
6629 start_line: entry.range.start,
6630 end_line: entry.range.end,
6631 original_line_number: entry.original_line_number,
6632 author: entry.author,
6633 author_mail: entry.author_mail,
6634 author_time: entry.author_time,
6635 author_tz: entry.author_tz,
6636 committer: entry.committer_name,
6637 committer_mail: entry.committer_email,
6638 committer_time: entry.committer_time,
6639 committer_tz: entry.committer_tz,
6640 summary: entry.summary,
6641 previous: entry.previous,
6642 filename: entry.filename,
6643 })
6644 .collect::<Vec<_>>();
6645
6646 let messages = blame
6647 .messages
6648 .into_iter()
6649 .map(|(oid, message)| proto::CommitMessage {
6650 oid: oid.as_bytes().into(),
6651 message,
6652 })
6653 .collect::<Vec<_>>();
6654
6655 proto::BlameBufferResponse {
6656 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6657 }
6658}
6659
6660fn deserialize_blame_buffer_response(
6661 response: proto::BlameBufferResponse,
6662) -> Option<git::blame::Blame> {
6663 let response = response.blame_response?;
6664 let entries = response
6665 .entries
6666 .into_iter()
6667 .filter_map(|entry| {
6668 Some(git::blame::BlameEntry {
6669 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6670 range: entry.start_line..entry.end_line,
6671 original_line_number: entry.original_line_number,
6672 committer_name: entry.committer,
6673 committer_time: entry.committer_time,
6674 committer_tz: entry.committer_tz,
6675 committer_email: entry.committer_mail,
6676 author: entry.author,
6677 author_mail: entry.author_mail,
6678 author_time: entry.author_time,
6679 author_tz: entry.author_tz,
6680 summary: entry.summary,
6681 previous: entry.previous,
6682 filename: entry.filename,
6683 })
6684 })
6685 .collect::<Vec<_>>();
6686
6687 let messages = response
6688 .messages
6689 .into_iter()
6690 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6691 .collect::<HashMap<_, _>>();
6692
6693 Some(Blame { entries, messages })
6694}
6695
6696fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6697 proto::Branch {
6698 is_head: branch.is_head,
6699 ref_name: branch.ref_name.to_string(),
6700 unix_timestamp: branch
6701 .most_recent_commit
6702 .as_ref()
6703 .map(|commit| commit.commit_timestamp as u64),
6704 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6705 ref_name: upstream.ref_name.to_string(),
6706 tracking: upstream
6707 .tracking
6708 .status()
6709 .map(|upstream| proto::UpstreamTracking {
6710 ahead: upstream.ahead as u64,
6711 behind: upstream.behind as u64,
6712 }),
6713 }),
6714 most_recent_commit: branch
6715 .most_recent_commit
6716 .as_ref()
6717 .map(|commit| proto::CommitSummary {
6718 sha: commit.sha.to_string(),
6719 subject: commit.subject.to_string(),
6720 commit_timestamp: commit.commit_timestamp,
6721 author_name: commit.author_name.to_string(),
6722 }),
6723 }
6724}
6725
6726fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6727 proto::Worktree {
6728 path: worktree.path.to_string_lossy().to_string(),
6729 ref_name: worktree.ref_name.to_string(),
6730 sha: worktree.sha.to_string(),
6731 }
6732}
6733
6734fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6735 git::repository::Worktree {
6736 path: PathBuf::from(proto.path.clone()),
6737 ref_name: proto.ref_name.clone().into(),
6738 sha: proto.sha.clone().into(),
6739 }
6740}
6741
6742fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6743 git::repository::Branch {
6744 is_head: proto.is_head,
6745 ref_name: proto.ref_name.clone().into(),
6746 upstream: proto
6747 .upstream
6748 .as_ref()
6749 .map(|upstream| git::repository::Upstream {
6750 ref_name: upstream.ref_name.to_string().into(),
6751 tracking: upstream
6752 .tracking
6753 .as_ref()
6754 .map(|tracking| {
6755 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6756 ahead: tracking.ahead as u32,
6757 behind: tracking.behind as u32,
6758 })
6759 })
6760 .unwrap_or(git::repository::UpstreamTracking::Gone),
6761 }),
6762 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6763 git::repository::CommitSummary {
6764 sha: commit.sha.to_string().into(),
6765 subject: commit.subject.to_string().into(),
6766 commit_timestamp: commit.commit_timestamp,
6767 author_name: commit.author_name.to_string().into(),
6768 has_parent: true,
6769 }
6770 }),
6771 }
6772}
6773
6774fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6775 proto::GitCommitDetails {
6776 sha: commit.sha.to_string(),
6777 message: commit.message.to_string(),
6778 commit_timestamp: commit.commit_timestamp,
6779 author_email: commit.author_email.to_string(),
6780 author_name: commit.author_name.to_string(),
6781 }
6782}
6783
6784fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6785 CommitDetails {
6786 sha: proto.sha.clone().into(),
6787 message: proto.message.clone().into(),
6788 commit_timestamp: proto.commit_timestamp,
6789 author_email: proto.author_email.clone().into(),
6790 author_name: proto.author_name.clone().into(),
6791 }
6792}
6793
/// Rebuilds a full `RepositorySnapshot` by querying the git backend, and
/// reports which aspects changed relative to `prev_snapshot` as
/// `RepositoryEvent`s (statuses and/or branch).
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    // "." scopes the status query to the entire working directory.
    let statuses = backend
        .status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )])
        .await?;
    let stash_entries = backend.stash_entries().await?;
    // Collect conflicted paths while building the status tree, so merge
    // details can be refreshed from the same pass over the entries.
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }
        }),
        (),
    );
    let mut merge_details = prev_snapshot.merge;
    let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
    log::debug!("new merge details: {merge_details:?}");

    // Either a conflict change or a status-tree change counts as a statuses
    // change for listeners.
    if conflicts_changed || statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged)
    }

    // Useful when branch is None in detached head state
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    let remote_origin_url = backend.remote_url("origin").await;
    let remote_upstream_url = backend.remote_url("upstream").await;

    // A recompute always bumps the scan id, even when nothing else changed.
    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        original_repo_abs_path: prev_snapshot.original_repo_abs_path,
        path_style: prev_snapshot.path_style,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}
6860
6861fn status_from_proto(
6862 simple_status: i32,
6863 status: Option<proto::GitFileStatus>,
6864) -> anyhow::Result<FileStatus> {
6865 use proto::git_file_status::Variant;
6866
6867 let Some(variant) = status.and_then(|status| status.variant) else {
6868 let code = proto::GitStatus::from_i32(simple_status)
6869 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
6870 let result = match code {
6871 proto::GitStatus::Added => TrackedStatus {
6872 worktree_status: StatusCode::Added,
6873 index_status: StatusCode::Unmodified,
6874 }
6875 .into(),
6876 proto::GitStatus::Modified => TrackedStatus {
6877 worktree_status: StatusCode::Modified,
6878 index_status: StatusCode::Unmodified,
6879 }
6880 .into(),
6881 proto::GitStatus::Conflict => UnmergedStatus {
6882 first_head: UnmergedStatusCode::Updated,
6883 second_head: UnmergedStatusCode::Updated,
6884 }
6885 .into(),
6886 proto::GitStatus::Deleted => TrackedStatus {
6887 worktree_status: StatusCode::Deleted,
6888 index_status: StatusCode::Unmodified,
6889 }
6890 .into(),
6891 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
6892 };
6893 return Ok(result);
6894 };
6895
6896 let result = match variant {
6897 Variant::Untracked(_) => FileStatus::Untracked,
6898 Variant::Ignored(_) => FileStatus::Ignored,
6899 Variant::Unmerged(unmerged) => {
6900 let [first_head, second_head] =
6901 [unmerged.first_head, unmerged.second_head].map(|head| {
6902 let code = proto::GitStatus::from_i32(head)
6903 .with_context(|| format!("Invalid git status code: {head}"))?;
6904 let result = match code {
6905 proto::GitStatus::Added => UnmergedStatusCode::Added,
6906 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6907 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6908 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
6909 };
6910 Ok(result)
6911 });
6912 let [first_head, second_head] = [first_head?, second_head?];
6913 UnmergedStatus {
6914 first_head,
6915 second_head,
6916 }
6917 .into()
6918 }
6919 Variant::Tracked(tracked) => {
6920 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6921 .map(|status| {
6922 let code = proto::GitStatus::from_i32(status)
6923 .with_context(|| format!("Invalid git status code: {status}"))?;
6924 let result = match code {
6925 proto::GitStatus::Modified => StatusCode::Modified,
6926 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6927 proto::GitStatus::Added => StatusCode::Added,
6928 proto::GitStatus::Deleted => StatusCode::Deleted,
6929 proto::GitStatus::Renamed => StatusCode::Renamed,
6930 proto::GitStatus::Copied => StatusCode::Copied,
6931 proto::GitStatus::Unmodified => StatusCode::Unmodified,
6932 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
6933 };
6934 Ok(result)
6935 });
6936 let [index_status, worktree_status] = [index_status?, worktree_status?];
6937 TrackedStatus {
6938 index_status,
6939 worktree_status,
6940 }
6941 .into()
6942 }
6943 };
6944 Ok(result)
6945}
6946
6947fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6948 use proto::git_file_status::{Tracked, Unmerged, Variant};
6949
6950 let variant = match status {
6951 FileStatus::Untracked => Variant::Untracked(Default::default()),
6952 FileStatus::Ignored => Variant::Ignored(Default::default()),
6953 FileStatus::Unmerged(UnmergedStatus {
6954 first_head,
6955 second_head,
6956 }) => Variant::Unmerged(Unmerged {
6957 first_head: unmerged_status_to_proto(first_head),
6958 second_head: unmerged_status_to_proto(second_head),
6959 }),
6960 FileStatus::Tracked(TrackedStatus {
6961 index_status,
6962 worktree_status,
6963 }) => Variant::Tracked(Tracked {
6964 index_status: tracked_status_to_proto(index_status),
6965 worktree_status: tracked_status_to_proto(worktree_status),
6966 }),
6967 };
6968 proto::GitFileStatus {
6969 variant: Some(variant),
6970 }
6971}
6972
6973fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6974 match code {
6975 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6976 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6977 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6978 }
6979}
6980
6981fn tracked_status_to_proto(code: StatusCode) -> i32 {
6982 match code {
6983 StatusCode::Added => proto::GitStatus::Added as _,
6984 StatusCode::Deleted => proto::GitStatus::Deleted as _,
6985 StatusCode::Modified => proto::GitStatus::Modified as _,
6986 StatusCode::Renamed => proto::GitStatus::Renamed as _,
6987 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6988 StatusCode::Copied => proto::GitStatus::Copied as _,
6989 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6990 }
6991}