1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for all git state in a project: the set of repositories
/// discovered in the project's worktrees, per-buffer diff/conflict state,
/// and the local/remote replication state.
pub struct GitStore {
    // Local vs. remote mode, plus any downstream share state.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Worktree ids associated with each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    // The repository currently treated as "active", if any.
    active_repo_id: Option<RepositoryId>,
    // In-flight diff loads keyed by buffer and diff kind; `Shared` so
    // concurrent requests for the same diff await a single task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diffs that have been shared with remote collaborators, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// The unstaged/uncommitted diffs for one buffer that have been shared
/// with a particular collaborator.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: the diffs open against this buffer, its
/// conflict-marker state, and cached base texts used for recalculation.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against arbitrary commits, keyed by OID (`None` = no base text).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders completed when conflict parsing next finishes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    // Cached committed texts keyed by commit OID, populated by `open_diff_since`.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed for a buffer.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to distinct values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}

/// The kind of diff requested for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Working copy vs. the index.
    Unstaged,
    /// Working copy vs. HEAD.
    Uncommitted,
    /// Working copy vs. an arbitrary commit (`None` = no base text).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store serves a local project or mirrors a remote one, along
/// with the downstream client when the project is currently shared.
enum GitStoreState {
    Local {
        // Source of fresh ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A change to forward to downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Share state for a local project: the downstream client plus the channel
/// feeding the background task that diffs and sends repository updates.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// A point-in-time checkpoint of every repository in the store, keyed by
/// working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}

/// The git status of a single path within a repository.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
// Status entries live in a SumTree keyed by repository-relative path and
// summarized by aggregated git status, so status counts over subtrees can
// be computed from summaries alone.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    // Entries are keyed (and therefore ordered) by repo path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Identifier for a repository, unique within a project.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: the merge heads recorded for each
/// conflicted path, and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Commit data for the git graph that may still be loading.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// An immutable snapshot of a repository's observable state: statuses,
/// branches, HEAD, merge state, remotes, stash, and linked git worktrees.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    // Per-path git statuses, summarized for subtree aggregation.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    // Incremented on each scan; used to order snapshot updates.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    pub linked_worktrees: Arc<[GitWorktree]>,
}
299
// Monotonic identifier for jobs running against a repository.
type JobId = u64;

/// Metadata about a running git job (start time and a display message).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Handle to the background task that resolves commit data for the git
/// graph; commit requests are sent over the channel by OID.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}
312
/// Lifecycle of the graph commit-data handler task.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Initial batch of commit data for the git graph, along with the task
/// fetching it and any error encountered so far.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    // Index into `commit_data` for each commit OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view over loaded graph commit data handed to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}

/// Starting point used when creating a new git worktree.
#[derive(Clone, Debug)]
enum CreateWorktreeStartPoint {
    Detached,
    Branched { name: String },
}
337
/// A single git repository known to the project, combining the latest
/// [`RepositorySnapshot`] with job scheduling and commit-graph caches.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Sender for jobs to run against the repository backend.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Next job id to hand out.
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily resolved backend (local filesystem or remote RPC).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}

// Expose the snapshot's fields directly on `Repository` for read access.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
365
/// Backend state for a repository that lives on the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    // Shell environment resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
372
impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`, first resolving the shell
    /// environment for its working directory so the `git` binary can be
    /// located via the user's `PATH`.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        // Resolve the directory's environment; fall back to an empty map
        // (with a logged error) rather than failing to open the repository.
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found via the resolved PATH; fall back
                    // to the process's own PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
415
/// Backend state for a repository accessed through a remote host over RPC.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// How git operations are executed for a repository: directly against the
/// local filesystem, or proxied over RPC to the project's host.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events while loading the git commit graph.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}

/// Events emitted by a [`Repository`] when parts of its snapshot change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
445
/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the [`GitStore`] itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of work to run against a repository's backend state.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): the key appears to identify jobs of the same category
    // for coalescing in the job queue — confirm in the job-processing loop.
    key: Option<GitJobKey>,
}

/// Categories of queued git jobs, compared via `PartialEq`.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
477
478impl GitStore {
479 pub fn local(
480 worktree_store: &Entity<WorktreeStore>,
481 buffer_store: Entity<BufferStore>,
482 environment: Entity<ProjectEnvironment>,
483 fs: Arc<dyn Fs>,
484 cx: &mut Context<Self>,
485 ) -> Self {
486 Self::new(
487 worktree_store.clone(),
488 buffer_store,
489 GitStoreState::Local {
490 next_repository_id: Arc::new(AtomicU64::new(1)),
491 downstream: None,
492 project_environment: environment,
493 fs,
494 },
495 cx,
496 )
497 }
498
499 pub fn remote(
500 worktree_store: &Entity<WorktreeStore>,
501 buffer_store: Entity<BufferStore>,
502 upstream_client: AnyProtoClient,
503 project_id: u64,
504 cx: &mut Context<Self>,
505 ) -> Self {
506 Self::new(
507 worktree_store.clone(),
508 buffer_store,
509 GitStoreState::Remote {
510 upstream_client,
511 upstream_project_id: project_id,
512 downstream: None,
513 },
514 cx,
515 )
516 }
517
518 fn new(
519 worktree_store: Entity<WorktreeStore>,
520 buffer_store: Entity<BufferStore>,
521 state: GitStoreState,
522 cx: &mut Context<Self>,
523 ) -> Self {
524 let mut _subscriptions = vec![
525 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
526 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
527 ];
528
529 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
530 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
531 }
532
533 GitStore {
534 state,
535 buffer_store,
536 worktree_store,
537 repositories: HashMap::default(),
538 worktree_ids: HashMap::default(),
539 active_repo_id: None,
540 _subscriptions,
541 loading_diffs: HashMap::default(),
542 shared_diffs: HashMap::default(),
543 diffs: HashMap::default(),
544 }
545 }
546
    /// Registers every git-related RPC handler on the given client. Called
    /// once at startup so incoming envelopes are routed to this entity.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
    }
598
599 pub fn is_local(&self) -> bool {
600 matches!(self.state, GitStoreState::Local { .. })
601 }
602 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
603 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
604 let id = repo.read(cx).id;
605 if self.active_repo_id != Some(id) {
606 self.active_repo_id = Some(id);
607 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
608 }
609 }
610 }
611
    /// Starts replicating git state to a downstream client after the project
    /// is shared.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            // Remote store: forward each repository's full initial state
            // directly, then record the downstream client.
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            // Local store: spawn a background task that receives snapshots
            // over a channel, diffs each against the last one sent, and
            // forwards the (possibly split) updates downstream.
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Queue an initial update for every repository we know of.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        // Known repository: send an incremental
                                        // update; otherwise, its full state.
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // When the channel closes (or a send fails), clear the
                        // downstream state so the store stops acting shared.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
692
693 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
694 match &mut self.state {
695 GitStoreState::Local {
696 downstream: downstream_client,
697 ..
698 } => {
699 downstream_client.take();
700 }
701 GitStoreState::Remote {
702 downstream: downstream_client,
703 ..
704 } => {
705 downstream_client.take();
706 }
707 }
708 self.shared_diffs.clear();
709 }
710
    /// Drops all diff state shared with the given peer (e.g. when they leave).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
714
715 pub fn active_repository(&self) -> Option<Entity<Repository>> {
716 self.active_repo_id
717 .as_ref()
718 .map(|id| self.repositories[id].clone())
719 }
720
    /// Returns the unstaged diff (working copy vs. index) for a buffer,
    /// creating and loading it if it isn't already open.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: the diff is already open — just wait for any in-flight
        // recalculation before handing it back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads: all callers share one loading task.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
775
    /// Returns a diff of the buffer against an arbitrary commit (`oid`), or
    /// against no base text at all when `oid` is `None`, creating and loading
    /// it if it isn't already open.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff is already open — just wait for any in-flight
        // recalculation before handing it back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load for this (buffer, oid) pair is already running: share it.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the blob content for `oid`; `None` means there is
                    // no base text to diff against.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Attach the unstaged diff as this diff's secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and register the diff for reuse.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
876
    /// Returns the uncommitted diff (working copy vs. HEAD) for a buffer,
    /// creating and loading it if it isn't already open.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff is already open — just wait for any in-flight
        // recalculation before handing it back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads: all callers share one loading task.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
929
    /// Shared implementation for opening unstaged/uncommitted diffs: records
    /// the loaded base texts on the buffer's [`BufferGitState`], creates the
    /// [`BufferDiff`] entity, and waits for the initial recalculation.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // If loading the base text failed, clear the loading entry so a
        // later call can retry, and propagate the error.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff carries an unstaged diff as its
                        // secondary diff; create one if none is open yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off the initial recalculation and wait for it before
                // returning the diff to the caller.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1004
1005 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1006 let diff_state = self.diffs.get(&buffer_id)?;
1007 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1008 }
1009
1010 pub fn get_uncommitted_diff(
1011 &self,
1012 buffer_id: BufferId,
1013 cx: &App,
1014 ) -> Option<Entity<BufferDiff>> {
1015 let diff_state = self.diffs.get(&buffer_id)?;
1016 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1017 }
1018
1019 pub fn get_diff_since_oid(
1020 &self,
1021 buffer_id: BufferId,
1022 oid: Option<git::Oid>,
1023 cx: &App,
1024 ) -> Option<Entity<BufferDiff>> {
1025 let diff_state = self.diffs.get(&buffer_id)?;
1026 diff_state.read(cx).oid_diff(oid)
1027 }
1028
1029 pub fn open_conflict_set(
1030 &mut self,
1031 buffer: Entity<Buffer>,
1032 cx: &mut Context<Self>,
1033 ) -> Entity<ConflictSet> {
1034 log::debug!("open conflict set");
1035 let buffer_id = buffer.read(cx).remote_id();
1036
1037 if let Some(git_state) = self.diffs.get(&buffer_id)
1038 && let Some(conflict_set) = git_state
1039 .read(cx)
1040 .conflict_set
1041 .as_ref()
1042 .and_then(|weak| weak.upgrade())
1043 {
1044 let conflict_set = conflict_set;
1045 let buffer_snapshot = buffer.read(cx).text_snapshot();
1046
1047 git_state.update(cx, |state, cx| {
1048 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1049 });
1050
1051 return conflict_set;
1052 }
1053
1054 let is_unmerged = self
1055 .repository_and_path_for_buffer_id(buffer_id, cx)
1056 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1057 let git_store = cx.weak_entity();
1058 let buffer_git_state = self
1059 .diffs
1060 .entry(buffer_id)
1061 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1062 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1063
1064 self._subscriptions
1065 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1066 cx.emit(GitStoreEvent::ConflictsUpdated);
1067 }));
1068
1069 buffer_git_state.update(cx, |state, cx| {
1070 state.conflict_set = Some(conflict_set.downgrade());
1071 let buffer_snapshot = buffer.read(cx).text_snapshot();
1072 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1073 });
1074
1075 conflict_set
1076 }
1077
1078 pub fn project_path_git_status(
1079 &self,
1080 project_path: &ProjectPath,
1081 cx: &App,
1082 ) -> Option<FileStatus> {
1083 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1084 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1085 }
1086
    /// Captures a checkpoint of every repository in the store.
    ///
    /// Collects one checkpoint future per repository on the main thread, then
    /// awaits them all on the background executor; the whole task fails if
    /// any single repository checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` resolves to a nested result; the `?` in the
                // closure flattens it so `try_join_all` sees a single error type.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Keyed by work directory so restore/compare can match
                // checkpoints back to repositories by path later.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1107
    /// Restores each repository to its state in the given checkpoint.
    ///
    /// Checkpoints are matched to repositories by work-directory path; a
    /// checkpoint whose repository no longer exists is silently skipped.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index current repositories by work directory for O(1) matching.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                tasks.push(async move { restore.await? });
            }
        }
        // Await all restores off the main thread; fail on the first error.
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1133
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Equality requires that every work directory in `left` has a matching
    /// entry in `right` and that each per-repository comparison reports
    /// equality. If `right` lacks an entry that `left` has, the task resolves
    /// to `false` immediately without consulting any repository.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                // Repositories that disappeared since the checkpoints were
                // taken contribute nothing to the comparison.
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    tasks.push(async move { compare.await? });
                }
            } else {
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1172
    /// Blames a buffer.
    ///
    /// Blames the buffer's content at `version` (or the current content when
    /// `version` is `None`). Local repositories blame through the git
    /// backend; remote ones issue a `BlameBuffer` RPC instead.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the content to blame before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle so the blame task doesn't keep the repo alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1219
    /// Builds a permalink URL to the selected lines of a buffer on its git
    /// hosting provider.
    ///
    /// If the buffer isn't in a git repository, falls back to constructing a
    /// permalink from Cargo registry metadata for Rust sources. Otherwise the
    /// link is built from the repository's remote URL and HEAD SHA (locally)
    /// or requested over RPC (remotely).
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; default to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        // Run the lookup as a repository job so it serializes with other git work.
        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1304
1305 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1306 match &self.state {
1307 GitStoreState::Local {
1308 downstream: downstream_client,
1309 ..
1310 } => downstream_client
1311 .as_ref()
1312 .map(|state| (state.client.clone(), state.project_id)),
1313 GitStoreState::Remote {
1314 downstream: downstream_client,
1315 ..
1316 } => downstream_client.clone(),
1317 }
1318 }
1319
1320 fn upstream_client(&self) -> Option<AnyProtoClient> {
1321 match &self.state {
1322 GitStoreState::Local { .. } => None,
1323 GitStoreState::Remote {
1324 upstream_client, ..
1325 } => Some(upstream_client.clone()),
1326 }
1327 }
1328
    /// Reacts to worktree changes: rescans changed paths, adds or updates
    /// repositories discovered in worktrees, and drops repositories whose
    /// last containing worktree was removed.
    ///
    /// Only meaningful for local stores; remote stores receive repository
    /// state over RPC instead, so they return early here.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by owning repository, then
                    // notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees (e.g. internal ones) don't contribute
                // repositories to the store.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // A repository is only dropped once no remaining worktree
                // references it.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream clients the repository is gone.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Responds to a single repository's state changing: refreshes conflict
    /// tracking for every open buffer that belongs to that repository, then
    /// re-emits the event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only buffers whose path resolves to this repository are affected.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse markers when the unmerged status actually
                        // flipped, to avoid redundant buffer scans.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1474
    /// Re-emits a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1478
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
    ///
    /// Each update either moves/rescans an existing repository, detaches a
    /// repository from the notifying worktree (removing it once no worktree
    /// references it), or registers a brand-new local repository.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update to an existing repository by either its old or
            // new work directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still present (possibly moved): record the
                    // worktree association, adopt the new path, and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Repository no longer visible from this worktree; drop it
                    // entirely once no worktree references it anymore.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // Brand-new repository: create a local Repository entity,
                // subscribe to its events, and kick off an initial scan.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Repositories in untrusted worktrees run with restricted git access.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository to appear becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        // Finalize removals: clear active-repo state and notify downstream.
        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1591
1592 fn on_trusted_worktrees_event(
1593 &mut self,
1594 _: Entity<TrustedWorktreesStore>,
1595 event: &TrustedWorktreesEvent,
1596 cx: &mut Context<Self>,
1597 ) {
1598 if !matches!(self.state, GitStoreState::Local { .. }) {
1599 return;
1600 }
1601
1602 let (is_trusted, event_paths) = match event {
1603 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1604 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1605 };
1606
1607 for (repo_id, worktree_ids) in &self.worktree_ids {
1608 if worktree_ids
1609 .iter()
1610 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1611 {
1612 if let Some(repo) = self.repositories.get(repo_id) {
1613 let repository_state = repo.read(cx).repository_state.clone();
1614 cx.background_spawn(async move {
1615 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1616 state.backend.set_trusted(is_trusted);
1617 }
1618 })
1619 .detach();
1620 }
1621 }
1622 }
1623 }
1624
    /// Wires buffer lifecycle events into git state: subscribes to new
    /// buffers, prunes per-buffer diff state when buffers close or drop, and
    /// reloads committed text when a buffer's file path changes.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Keep diff state in sync when the buffer's language changes
                // after the diff was created.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop both local and per-peer shared diff state for the buffer.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Best effort: failures are logged, not surfaced.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1697
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all of the
    /// resulting work has completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                // Shadow `buffer` with its text snapshot for the async work below.
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    // Start the recalculation first, then wait on its
                    // completion signal (which only exists once started).
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1723
    /// When hunks are staged or unstaged in a buffer diff, writes the new
    /// index text to the repository, rolling back the pending hunk state and
    /// emitting an error event if the index write fails.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Count each staging operation so later results can be
                // matched against the operation that produced them.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // On failure, clear the optimistic pending-hunk state
                        // and surface the error to the store's subscribers.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1763
1764 fn local_worktree_git_repos_changed(
1765 &mut self,
1766 worktree: Entity<Worktree>,
1767 changed_repos: &UpdatedGitRepositoriesSet,
1768 cx: &mut Context<Self>,
1769 ) {
1770 log::debug!("local worktree repos changed");
1771 debug_assert!(worktree.read(cx).is_local());
1772
1773 for repository in self.repositories.values() {
1774 repository.update(cx, |repository, cx| {
1775 let repo_abs_path = &repository.work_directory_abs_path;
1776 if changed_repos.iter().any(|update| {
1777 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1778 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1779 }) {
1780 repository.reload_buffer_diff_bases(cx);
1781 }
1782 });
1783 }
1784 }
1785
    /// Returns all repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1789
    /// Returns the original (main) repository working directory for the given worktree.
    /// For normal checkouts this equals the worktree's own path; for linked
    /// worktrees it points back to the original repo.
    ///
    /// The active repository is considered first, then the remaining tracked
    /// repositories, so an ambiguous worktree resolves to the active repo.
    pub fn original_repo_path_for_worktree(
        &self,
        worktree_id: WorktreeId,
        cx: &App,
    ) -> Option<Arc<Path>> {
        self.active_repo_id
            .iter()
            .chain(self.worktree_ids.keys())
            .find(|repo_id| {
                self.worktree_ids
                    .get(repo_id)
                    .is_some_and(|ids| ids.contains(&worktree_id))
            })
            .and_then(|repo_id| self.repositories.get(repo_id))
            .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
    }
1809
1810 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1811 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1812 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1813 Some(status.status)
1814 }
1815
1816 pub fn repository_and_path_for_buffer_id(
1817 &self,
1818 buffer_id: BufferId,
1819 cx: &App,
1820 ) -> Option<(Entity<Repository>, RepoPath)> {
1821 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1822 let project_path = buffer.read(cx).project_path(cx)?;
1823 self.repository_and_path_for_project_path(&project_path, cx)
1824 }
1825
    /// Finds the repository containing the given project path, along with the
    /// path relative to that repository's work directory.
    ///
    /// When repositories are nested, taking the maximum by work-directory
    /// path selects the deepest work directory containing the path (a nested
    /// directory compares greater than its ancestor).
    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }
1840
1841 pub fn git_init(
1842 &self,
1843 path: Arc<Path>,
1844 fallback_branch_name: String,
1845 cx: &App,
1846 ) -> Task<Result<()>> {
1847 match &self.state {
1848 GitStoreState::Local { fs, .. } => {
1849 let fs = fs.clone();
1850 cx.background_executor()
1851 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1852 }
1853 GitStoreState::Remote {
1854 upstream_client,
1855 upstream_project_id: project_id,
1856 ..
1857 } => {
1858 let client = upstream_client.clone();
1859 let project_id = *project_id;
1860 cx.background_executor().spawn(async move {
1861 client
1862 .request(proto::GitInit {
1863 project_id: project_id,
1864 abs_path: path.to_string_lossy().into_owned(),
1865 fallback_branch_name,
1866 })
1867 .await?;
1868 Ok(())
1869 })
1870 }
1871 }
1872 }
1873
1874 pub fn git_clone(
1875 &self,
1876 repo: String,
1877 path: impl Into<Arc<std::path::Path>>,
1878 cx: &App,
1879 ) -> Task<Result<()>> {
1880 let path = path.into();
1881 match &self.state {
1882 GitStoreState::Local { fs, .. } => {
1883 let fs = fs.clone();
1884 cx.background_executor()
1885 .spawn(async move { fs.git_clone(&repo, &path).await })
1886 }
1887 GitStoreState::Remote {
1888 upstream_client,
1889 upstream_project_id,
1890 ..
1891 } => {
1892 if upstream_client.is_via_collab() {
1893 return Task::ready(Err(anyhow!(
1894 "Git Clone isn't supported for project guests"
1895 )));
1896 }
1897 let request = upstream_client.request(proto::GitClone {
1898 project_id: *upstream_project_id,
1899 abs_path: path.to_string_lossy().into_owned(),
1900 remote_repo: repo,
1901 });
1902
1903 cx.background_spawn(async move {
1904 let result = request.await?;
1905
1906 match result.success {
1907 true => Ok(()),
1908 false => Err(anyhow!("Git Clone failed")),
1909 }
1910 })
1911 }
1912 }
1913 }
1914
    /// Handles an `UpdateRepository` message from upstream: creates the
    /// remote repository entity on first sight, applies the update to it, and
    /// re-forwards the update to any downstream client.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            // Mutable because `project_id` is rewritten before re-forwarding below.
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Subscription is created inside the entry closure but pushed
            // afterwards, since `this` is mutably borrowed by `entry`.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward downstream under our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1970
1971 async fn handle_remove_repository(
1972 this: Entity<Self>,
1973 envelope: TypedEnvelope<proto::RemoveRepository>,
1974 mut cx: AsyncApp,
1975 ) -> Result<()> {
1976 this.update(&mut cx, |this, cx| {
1977 let mut update = envelope.payload;
1978 let id = RepositoryId::from_proto(update.id);
1979 this.repositories.remove(&id);
1980 if let Some((client, project_id)) = this.downstream_client() {
1981 update.project_id = project_id.to_proto();
1982 client.send(update).log_err();
1983 }
1984 if this.active_repo_id == Some(id) {
1985 this.active_repo_id = None;
1986 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1987 }
1988 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1989 });
1990 Ok(())
1991 }
1992
1993 async fn handle_git_init(
1994 this: Entity<Self>,
1995 envelope: TypedEnvelope<proto::GitInit>,
1996 cx: AsyncApp,
1997 ) -> Result<proto::Ack> {
1998 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1999 let name = envelope.payload.fallback_branch_name;
2000 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2001 .await?;
2002
2003 Ok(proto::Ack {})
2004 }
2005
2006 async fn handle_git_clone(
2007 this: Entity<Self>,
2008 envelope: TypedEnvelope<proto::GitClone>,
2009 cx: AsyncApp,
2010 ) -> Result<proto::GitCloneResponse> {
2011 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2012 let repo_name = envelope.payload.remote_repo;
2013 let result = cx
2014 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2015 .await;
2016
2017 Ok(proto::GitCloneResponse {
2018 success: result.is_ok(),
2019 })
2020 }
2021
2022 async fn handle_fetch(
2023 this: Entity<Self>,
2024 envelope: TypedEnvelope<proto::Fetch>,
2025 mut cx: AsyncApp,
2026 ) -> Result<proto::RemoteMessageResponse> {
2027 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2028 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2029 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2030 let askpass_id = envelope.payload.askpass_id;
2031
2032 let askpass = make_remote_delegate(
2033 this,
2034 envelope.payload.project_id,
2035 repository_id,
2036 askpass_id,
2037 &mut cx,
2038 );
2039
2040 let remote_output = repository_handle
2041 .update(&mut cx, |repository_handle, cx| {
2042 repository_handle.fetch(fetch_options, askpass, cx)
2043 })
2044 .await??;
2045
2046 Ok(proto::RemoteMessageResponse {
2047 stdout: remote_output.stdout,
2048 stderr: remote_output.stderr,
2049 })
2050 }
2051
    /// Handles an incoming `Push` RPC by pushing the requested branch,
    /// relaying askpass prompts back to the requesting peer, and returning
    /// git's stdout/stderr.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `as_ref().map(|_| ...)` preserves "no option set" as `None`, while
        // `options()` supplies the decoded enum value for the set case.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2099
    /// Handles a remote peer's `Pull` request (optionally rebasing), relaying
    /// git's output back to the caller.
    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        // Credential prompts are forwarded to the requesting peer.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `branch_name` is optional on the wire; `None` is passed through to
        // `pull` unchanged.
        let branch_name = envelope.payload.branch_name.map(|name| name.into());
        let remote_name = envelope.payload.remote_name.into();
        let rebase = envelope.payload.rebase;

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }
2131
2132 async fn handle_stage(
2133 this: Entity<Self>,
2134 envelope: TypedEnvelope<proto::Stage>,
2135 mut cx: AsyncApp,
2136 ) -> Result<proto::Ack> {
2137 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2138 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2139
2140 let entries = envelope
2141 .payload
2142 .paths
2143 .into_iter()
2144 .map(|path| RepoPath::new(&path))
2145 .collect::<Result<Vec<_>>>()?;
2146
2147 repository_handle
2148 .update(&mut cx, |repository_handle, cx| {
2149 repository_handle.stage_entries(entries, cx)
2150 })
2151 .await?;
2152 Ok(proto::Ack {})
2153 }
2154
2155 async fn handle_unstage(
2156 this: Entity<Self>,
2157 envelope: TypedEnvelope<proto::Unstage>,
2158 mut cx: AsyncApp,
2159 ) -> Result<proto::Ack> {
2160 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2161 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2162
2163 let entries = envelope
2164 .payload
2165 .paths
2166 .into_iter()
2167 .map(|path| RepoPath::new(&path))
2168 .collect::<Result<Vec<_>>>()?;
2169
2170 repository_handle
2171 .update(&mut cx, |repository_handle, cx| {
2172 repository_handle.unstage_entries(entries, cx)
2173 })
2174 .await?;
2175
2176 Ok(proto::Ack {})
2177 }
2178
    /// Stashes the given paths in the repository named by the request.
    async fn handle_stash(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stash>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Fail the whole request up front if any wire path is invalid.
        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(|path| RepoPath::new(&path))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_entries(entries, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2202
2203 async fn handle_stash_pop(
2204 this: Entity<Self>,
2205 envelope: TypedEnvelope<proto::StashPop>,
2206 mut cx: AsyncApp,
2207 ) -> Result<proto::Ack> {
2208 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2209 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2210 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2211
2212 repository_handle
2213 .update(&mut cx, |repository_handle, cx| {
2214 repository_handle.stash_pop(stash_index, cx)
2215 })
2216 .await?;
2217
2218 Ok(proto::Ack {})
2219 }
2220
    /// Applies a stash entry without removing it from the stash list.
    /// `stash_index` of `None` targets the default entry.
    async fn handle_stash_apply(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashApply>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_apply(stash_index, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2238
    /// Drops (deletes) a stash entry. `stash_index` of `None` targets the
    /// default entry.
    async fn handle_stash_drop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashDrop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        // Note the double `?` here: unlike `stash_pop`/`stash_apply` above,
        // `stash_drop`'s task yields a nested `Result` that must also be
        // unwrapped.
        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_drop(stash_index, cx)
            })
            .await??;

        Ok(proto::Ack {})
    }
2256
    /// Overwrites the index (staged) content of a single path with the text
    /// supplied in the request.
    async fn handle_set_index_text(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SetIndexText>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let repo_path = RepoPath::from_proto(&envelope.payload.path)?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                // NOTE(review): the third argument is `None` here; its meaning
                // isn't visible from this call site — check
                // `spawn_set_index_text_job`'s signature before changing it.
                repository_handle.spawn_set_index_text_job(
                    repo_path,
                    envelope.payload.text,
                    None,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2278
2279 async fn handle_run_hook(
2280 this: Entity<Self>,
2281 envelope: TypedEnvelope<proto::RunGitHook>,
2282 mut cx: AsyncApp,
2283 ) -> Result<proto::Ack> {
2284 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2285 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2286 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2287 repository_handle
2288 .update(&mut cx, |repository_handle, cx| {
2289 repository_handle.run_hook(hook, cx)
2290 })
2291 .await??;
2292 Ok(proto::Ack {})
2293 }
2294
    /// Creates a commit in the requested repository using the message and
    /// options from the envelope.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        // Commit hooks or signing may prompt; forward prompts to the peer.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    // `zip`: an author override is applied only when the client
                    // supplied BOTH a name and an email; otherwise `None`.
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                        allow_empty: options.allow_empty,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2334
2335 async fn handle_get_remotes(
2336 this: Entity<Self>,
2337 envelope: TypedEnvelope<proto::GetRemotes>,
2338 mut cx: AsyncApp,
2339 ) -> Result<proto::GetRemotesResponse> {
2340 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2341 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2342
2343 let branch_name = envelope.payload.branch_name;
2344 let is_push = envelope.payload.is_push;
2345
2346 let remotes = repository_handle
2347 .update(&mut cx, |repository_handle, _| {
2348 repository_handle.get_remotes(branch_name, is_push)
2349 })
2350 .await??;
2351
2352 Ok(proto::GetRemotesResponse {
2353 remotes: remotes
2354 .into_iter()
2355 .map(|remotes| proto::get_remotes_response::Remote {
2356 name: remotes.name.to_string(),
2357 })
2358 .collect::<Vec<_>>(),
2359 })
2360 }
2361
2362 async fn handle_get_worktrees(
2363 this: Entity<Self>,
2364 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2365 mut cx: AsyncApp,
2366 ) -> Result<proto::GitWorktreesResponse> {
2367 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2368 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2369
2370 let worktrees = repository_handle
2371 .update(&mut cx, |repository_handle, _| {
2372 repository_handle.worktrees()
2373 })
2374 .await??;
2375
2376 Ok(proto::GitWorktreesResponse {
2377 worktrees: worktrees
2378 .into_iter()
2379 .map(|worktree| worktree_to_proto(&worktree))
2380 .collect::<Vec<_>>(),
2381 })
2382 }
2383
    /// Creates a new git worktree at `directory`.
    async fn handle_create_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let directory = PathBuf::from(envelope.payload.directory);
        // An empty `name` on the wire means a detached worktree; otherwise a
        // branch with that name is used as the start point.
        let start_point = if envelope.payload.name.is_empty() {
            CreateWorktreeStartPoint::Detached
        } else {
            CreateWorktreeStartPoint::Branched {
                name: envelope.payload.name,
            }
        };
        let commit = envelope.payload.commit;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_worktree_with_start_point(start_point, directory, commit)
            })
            .await??;

        Ok(proto::Ack {})
    }
2409
2410 async fn handle_remove_worktree(
2411 this: Entity<Self>,
2412 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2413 mut cx: AsyncApp,
2414 ) -> Result<proto::Ack> {
2415 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2416 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2417 let path = PathBuf::from(envelope.payload.path);
2418 let force = envelope.payload.force;
2419
2420 repository_handle
2421 .update(&mut cx, |repository_handle, _| {
2422 repository_handle.remove_worktree(path, force)
2423 })
2424 .await??;
2425
2426 Ok(proto::Ack {})
2427 }
2428
2429 async fn handle_rename_worktree(
2430 this: Entity<Self>,
2431 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2432 mut cx: AsyncApp,
2433 ) -> Result<proto::Ack> {
2434 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2435 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2436 let old_path = PathBuf::from(envelope.payload.old_path);
2437 let new_path = PathBuf::from(envelope.payload.new_path);
2438
2439 repository_handle
2440 .update(&mut cx, |repository_handle, _| {
2441 repository_handle.rename_worktree(old_path, new_path)
2442 })
2443 .await??;
2444
2445 Ok(proto::Ack {})
2446 }
2447
2448 async fn handle_get_head_sha(
2449 this: Entity<Self>,
2450 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2451 mut cx: AsyncApp,
2452 ) -> Result<proto::GitGetHeadShaResponse> {
2453 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2454 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2455
2456 let head_sha = repository_handle
2457 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2458 .await??;
2459
2460 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2461 }
2462
2463 async fn handle_get_branches(
2464 this: Entity<Self>,
2465 envelope: TypedEnvelope<proto::GitGetBranches>,
2466 mut cx: AsyncApp,
2467 ) -> Result<proto::GitBranchesResponse> {
2468 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2469 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2470
2471 let branches = repository_handle
2472 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2473 .await??;
2474
2475 Ok(proto::GitBranchesResponse {
2476 branches: branches
2477 .into_iter()
2478 .map(|branch| branch_to_proto(&branch))
2479 .collect::<Vec<_>>(),
2480 })
2481 }
    /// Looks up the repository's default branch; `branch` is `None` in the
    /// response when no default could be determined.
    async fn handle_get_default_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDefaultBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetDefaultBranchResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch = repository_handle
            .update(&mut cx, |repository_handle, _| {
                // NOTE(review): the `false` argument's meaning isn't visible
                // here — check `default_branch`'s signature before changing it.
                repository_handle.default_branch(false)
            })
            .await??
            .map(Into::into);

        Ok(proto::GetDefaultBranchResponse { branch })
    }
2499 async fn handle_create_branch(
2500 this: Entity<Self>,
2501 envelope: TypedEnvelope<proto::GitCreateBranch>,
2502 mut cx: AsyncApp,
2503 ) -> Result<proto::Ack> {
2504 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2505 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2506 let branch_name = envelope.payload.branch_name;
2507
2508 repository_handle
2509 .update(&mut cx, |repository_handle, _| {
2510 repository_handle.create_branch(branch_name, None)
2511 })
2512 .await??;
2513
2514 Ok(proto::Ack {})
2515 }
2516
2517 async fn handle_change_branch(
2518 this: Entity<Self>,
2519 envelope: TypedEnvelope<proto::GitChangeBranch>,
2520 mut cx: AsyncApp,
2521 ) -> Result<proto::Ack> {
2522 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2523 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2524 let branch_name = envelope.payload.branch_name;
2525
2526 repository_handle
2527 .update(&mut cx, |repository_handle, _| {
2528 repository_handle.change_branch(branch_name)
2529 })
2530 .await??;
2531
2532 Ok(proto::Ack {})
2533 }
2534
2535 async fn handle_rename_branch(
2536 this: Entity<Self>,
2537 envelope: TypedEnvelope<proto::GitRenameBranch>,
2538 mut cx: AsyncApp,
2539 ) -> Result<proto::Ack> {
2540 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2541 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2542 let branch = envelope.payload.branch;
2543 let new_name = envelope.payload.new_name;
2544
2545 repository_handle
2546 .update(&mut cx, |repository_handle, _| {
2547 repository_handle.rename_branch(branch, new_name)
2548 })
2549 .await??;
2550
2551 Ok(proto::Ack {})
2552 }
2553
2554 async fn handle_create_remote(
2555 this: Entity<Self>,
2556 envelope: TypedEnvelope<proto::GitCreateRemote>,
2557 mut cx: AsyncApp,
2558 ) -> Result<proto::Ack> {
2559 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2560 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2561 let remote_name = envelope.payload.remote_name;
2562 let remote_url = envelope.payload.remote_url;
2563
2564 repository_handle
2565 .update(&mut cx, |repository_handle, _| {
2566 repository_handle.create_remote(remote_name, remote_url)
2567 })
2568 .await??;
2569
2570 Ok(proto::Ack {})
2571 }
2572
2573 async fn handle_delete_branch(
2574 this: Entity<Self>,
2575 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2576 mut cx: AsyncApp,
2577 ) -> Result<proto::Ack> {
2578 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2579 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2580 let is_remote = envelope.payload.is_remote;
2581 let branch_name = envelope.payload.branch_name;
2582
2583 repository_handle
2584 .update(&mut cx, |repository_handle, _| {
2585 repository_handle.delete_branch(is_remote, branch_name)
2586 })
2587 .await??;
2588
2589 Ok(proto::Ack {})
2590 }
2591
2592 async fn handle_remove_remote(
2593 this: Entity<Self>,
2594 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2595 mut cx: AsyncApp,
2596 ) -> Result<proto::Ack> {
2597 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2598 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2599 let remote_name = envelope.payload.remote_name;
2600
2601 repository_handle
2602 .update(&mut cx, |repository_handle, _| {
2603 repository_handle.remove_remote(remote_name)
2604 })
2605 .await??;
2606
2607 Ok(proto::Ack {})
2608 }
2609
    /// Loads the details (author, message, timestamp) of a single commit and
    /// serializes them for the wire.
    async fn handle_show(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitShow>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitCommitDetails> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.show(envelope.payload.commit)
            })
            .await??;
        Ok(proto::GitCommitDetails {
            sha: commit.sha.into(),
            message: commit.message.into(),
            commit_timestamp: commit.commit_timestamp,
            author_email: commit.author_email.into(),
            author_name: commit.author_name.into(),
        })
    }
2631
2632 async fn handle_create_checkpoint(
2633 this: Entity<Self>,
2634 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2635 mut cx: AsyncApp,
2636 ) -> Result<proto::GitCreateCheckpointResponse> {
2637 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2638 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2639
2640 let checkpoint = repository_handle
2641 .update(&mut cx, |repository, _| repository.checkpoint())
2642 .await??;
2643
2644 Ok(proto::GitCreateCheckpointResponse {
2645 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2646 })
2647 }
2648
2649 async fn handle_restore_checkpoint(
2650 this: Entity<Self>,
2651 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2652 mut cx: AsyncApp,
2653 ) -> Result<proto::Ack> {
2654 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2655 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2656
2657 let checkpoint = GitRepositoryCheckpoint {
2658 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2659 };
2660
2661 repository_handle
2662 .update(&mut cx, |repository, _| {
2663 repository.restore_checkpoint(checkpoint)
2664 })
2665 .await??;
2666
2667 Ok(proto::Ack {})
2668 }
2669
2670 async fn handle_compare_checkpoints(
2671 this: Entity<Self>,
2672 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2673 mut cx: AsyncApp,
2674 ) -> Result<proto::GitCompareCheckpointsResponse> {
2675 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2676 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2677
2678 let left = GitRepositoryCheckpoint {
2679 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2680 };
2681 let right = GitRepositoryCheckpoint {
2682 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2683 };
2684
2685 let equal = repository_handle
2686 .update(&mut cx, |repository, _| {
2687 repository.compare_checkpoints(left, right)
2688 })
2689 .await??;
2690
2691 Ok(proto::GitCompareCheckpointsResponse { equal })
2692 }
2693
2694 async fn handle_diff_checkpoints(
2695 this: Entity<Self>,
2696 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2697 mut cx: AsyncApp,
2698 ) -> Result<proto::GitDiffCheckpointsResponse> {
2699 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2700 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2701
2702 let base = GitRepositoryCheckpoint {
2703 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2704 };
2705 let target = GitRepositoryCheckpoint {
2706 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2707 };
2708
2709 let diff = repository_handle
2710 .update(&mut cx, |repository, _| {
2711 repository.diff_checkpoints(base, target)
2712 })
2713 .await??;
2714
2715 Ok(proto::GitDiffCheckpointsResponse { diff })
2716 }
2717
    /// Loads the per-file diff of a commit (old text, new text, binary flag
    /// for each changed file) and serializes it for the wire.
    async fn handle_load_commit_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::LoadCommitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::LoadCommitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit_diff = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.load_commit_diff(envelope.payload.commit)
            })
            .await??;
        Ok(proto::LoadCommitDiffResponse {
            files: commit_diff
                .files
                .into_iter()
                .map(|file| proto::CommitFile {
                    path: file.path.to_proto(),
                    old_text: file.old_text,
                    new_text: file.new_text,
                    is_binary: file.is_binary,
                })
                .collect(),
        })
    }
2744
    /// Resets the repository to the given commit. Only soft and mixed resets
    /// are representable on the wire; hard reset is intentionally absent here.
    async fn handle_reset(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitReset>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let mode = match envelope.payload.mode() {
            git_reset::ResetMode::Soft => ResetMode::Soft,
            git_reset::ResetMode::Mixed => ResetMode::Mixed,
        };

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.reset(envelope.payload.commit, mode, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2765
2766 async fn handle_checkout_files(
2767 this: Entity<Self>,
2768 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2769 mut cx: AsyncApp,
2770 ) -> Result<proto::Ack> {
2771 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2772 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2773 let paths = envelope
2774 .payload
2775 .paths
2776 .iter()
2777 .map(|s| RepoPath::from_proto(s))
2778 .collect::<Result<Vec<_>>>()?;
2779
2780 repository_handle
2781 .update(&mut cx, |repository_handle, cx| {
2782 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2783 })
2784 .await?;
2785 Ok(proto::Ack {})
2786 }
2787
    /// Opens (or reuses) the commit-message buffer for the repository, shares
    /// it with the requesting peer, and returns its buffer id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Replicate the buffer to the originating peer (falling back to the
        // direct sender when no original sender is recorded, e.g. when the
        // message was not forwarded).
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2818
    /// Answers a credential prompt on behalf of a git subprocess by asking the
    /// registered askpass delegate for this repository/askpass-id pair.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Take the delegate out of the map while we await the user, so the
        // mutex is not held across the await; it is reinserted below.
        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            // Missing delegate indicates a protocol bug; panic in debug builds
            // but fail the request gracefully in release.
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so subsequent prompts for the same operation
        // can be answered.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2847
2848 async fn handle_check_for_pushed_commits(
2849 this: Entity<Self>,
2850 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2851 mut cx: AsyncApp,
2852 ) -> Result<proto::CheckForPushedCommitsResponse> {
2853 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2854 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2855
2856 let branches = repository_handle
2857 .update(&mut cx, |repository_handle, _| {
2858 repository_handle.check_for_pushed_commits()
2859 })
2860 .await??;
2861 Ok(proto::CheckForPushedCommitsResponse {
2862 pushed_to: branches
2863 .into_iter()
2864 .map(|commit| commit.to_string())
2865 .collect(),
2866 })
2867 }
2868
2869 async fn handle_git_diff(
2870 this: Entity<Self>,
2871 envelope: TypedEnvelope<proto::GitDiff>,
2872 mut cx: AsyncApp,
2873 ) -> Result<proto::GitDiffResponse> {
2874 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2875 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2876 let diff_type = match envelope.payload.diff_type() {
2877 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2878 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2879 proto::git_diff::DiffType::MergeBase => {
2880 let base_ref = envelope
2881 .payload
2882 .merge_base_ref
2883 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2884 DiffType::MergeBase {
2885 base_ref: base_ref.into(),
2886 }
2887 }
2888 };
2889
2890 let mut diff = repository_handle
2891 .update(&mut cx, |repository_handle, cx| {
2892 repository_handle.diff(diff_type, cx)
2893 })
2894 .await??;
2895 const ONE_MB: usize = 1_000_000;
2896 if diff.len() > ONE_MB {
2897 diff = diff.chars().take(ONE_MB).collect()
2898 }
2899
2900 Ok(proto::GitDiffResponse { diff })
2901 }
2902
    /// Computes the set of paths that changed between two tree-ishes and
    /// returns one status entry (added/modified/deleted plus old OID) per path.
    async fn handle_tree_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetTreeDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId(request.payload.repository_id);
        // `is_merge` selects diffing against the merge base of `base`/`head`
        // rather than directly between the two refs.
        let diff_type = if request.payload.is_merge {
            DiffTreeType::MergeBase {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        } else {
            DiffTreeType::Since {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        };

        let diff = this
            .update(&mut cx, |this, cx| {
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
            })
            .context("missing repository")?
            .await??;

        Ok(proto::GetTreeDiffResponse {
            entries: diff
                .entries
                .into_iter()
                .map(|(path, status)| proto::TreeDiffStatus {
                    path: path.as_ref().to_proto(),
                    // This first match only inspects the variant and binds
                    // nothing, so `status` is still intact for the `oid`
                    // match below, which moves `old` out of it. Field order
                    // here matters for that reason.
                    status: match status {
                        TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
                        TreeDiffStatus::Modified { .. } => {
                            proto::tree_diff_status::Status::Modified.into()
                        }
                        TreeDiffStatus::Deleted { .. } => {
                            proto::tree_diff_status::Status::Deleted.into()
                        }
                    },
                    // Only modified/deleted entries have a previous blob OID.
                    oid: match status {
                        TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
                            Some(old.to_string())
                        }
                        TreeDiffStatus::Added => None,
                    },
                })
                .collect(),
        })
    }
2954
2955 async fn handle_get_blob_content(
2956 this: Entity<Self>,
2957 request: TypedEnvelope<proto::GetBlobContent>,
2958 mut cx: AsyncApp,
2959 ) -> Result<proto::GetBlobContentResponse> {
2960 let oid = git::Oid::from_str(&request.payload.oid)?;
2961 let repository_id = RepositoryId(request.payload.repository_id);
2962 let content = this
2963 .update(&mut cx, |this, cx| {
2964 let repository = this.repositories().get(&repository_id)?;
2965 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2966 })
2967 .context("missing repository")?
2968 .await?;
2969 Ok(proto::GetBlobContentResponse { content })
2970 }
2971
    /// Opens the unstaged diff for a buffer on behalf of a peer, caches the
    /// diff handle per (peer, buffer), and returns the staged base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Remember the diff handle keyed by the originating peer and buffer so
        // it stays alive and can be found for this peer later.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        // `None` when the index has no base text for this buffer.
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2995
    /// Opens the uncommitted diff (worktree vs. HEAD, with the index as a
    /// secondary layer) for a buffer on behalf of a peer. Caches the handle
    /// per (peer, buffer) and encodes committed/staged text plus a `Mode`
    /// telling the peer how to interpret the two.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Keep the diff handle alive and addressable for this peer.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // Index (staged) base text, if the secondary unstaged diff exists
            // and actually has a base.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                // HEAD text exists for this path.
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Index base is literally the same buffer as HEAD's,
                        // so the staged text need not be resent.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // HEAD exists but nothing is staged for this path.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No HEAD text (e.g. the file isn't committed); send whatever
                // staged text exists.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3056
    /// Applies new diff base texts (pushed by the remote) to a buffer's diff
    /// state. Silently does nothing if either the diff state or the buffer is
    /// no longer present locally.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
3075
    /// RPC handler: computes git blame for a buffer at the requested version,
    /// waiting for the local replica to catch up to that version first.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Wait until all edits up to the requested version have been applied,
        // so the blame result lines up with the requester's view of the text.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3098
3099 async fn handle_get_permalink_to_line(
3100 this: Entity<Self>,
3101 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3102 mut cx: AsyncApp,
3103 ) -> Result<proto::GetPermalinkToLineResponse> {
3104 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3105 // let version = deserialize_version(&envelope.payload.version);
3106 let selection = {
3107 let proto_selection = envelope
3108 .payload
3109 .selection
3110 .context("no selection to get permalink for defined")?;
3111 proto_selection.start as u32..proto_selection.end as u32
3112 };
3113 let buffer = this.read_with(&cx, |this, cx| {
3114 this.buffer_store.read(cx).get_existing(buffer_id)
3115 })?;
3116 let permalink = this
3117 .update(&mut cx, |this, cx| {
3118 this.get_permalink_to_line(&buffer, selection, cx)
3119 })
3120 .await?;
3121 Ok(proto::GetPermalinkToLineResponse {
3122 permalink: permalink.to_string(),
3123 })
3124 }
3125
3126 fn repository_for_request(
3127 this: &Entity<Self>,
3128 id: RepositoryId,
3129 cx: &mut AsyncApp,
3130 ) -> Result<Entity<Repository>> {
3131 this.read_with(cx, |this, _| {
3132 this.repositories
3133 .get(&id)
3134 .context("missing repository handle")
3135 .cloned()
3136 })
3137 }
3138
3139 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3140 self.repositories
3141 .iter()
3142 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3143 .collect()
3144 }
3145
    /// Partition a batch of updated worktree entries by the repository that
    /// contains them, resolving each absolute path to a repo-relative path.
    ///
    /// Returns a background task that yields, for every affected repository,
    /// the list of repo paths that changed. When repositories are nested,
    /// each path is assigned only to its innermost containing repository.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        // Sort the changed paths so that, for each repository, its members
        // form a contiguous range locatable via `partition_point` below.
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // Spawn one background task per repository. Iterating the sorted
            // work directories in reverse means nested (more specific)
            // repositories are processed before their parents.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3225}
3226
3227impl BufferGitState {
3228 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3229 Self {
3230 unstaged_diff: Default::default(),
3231 uncommitted_diff: Default::default(),
3232 oid_diffs: Default::default(),
3233 recalculate_diff_task: Default::default(),
3234 language: Default::default(),
3235 language_registry: Default::default(),
3236 recalculating_tx: postage::watch::channel_with(false).0,
3237 hunk_staging_operation_count: 0,
3238 hunk_staging_operation_count_as_of_write: 0,
3239 head_text: Default::default(),
3240 index_text: Default::default(),
3241 oid_texts: Default::default(),
3242 head_changed: Default::default(),
3243 index_changed: Default::default(),
3244 language_changed: Default::default(),
3245 conflict_updated_futures: Default::default(),
3246 conflict_set: Default::default(),
3247 reparse_conflict_markers_task: Default::default(),
3248 }
3249 }
3250
3251 #[ztracing::instrument(skip_all)]
3252 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3253 self.language = buffer.read(cx).language().cloned();
3254 self.language_changed = true;
3255 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3256 }
3257
    /// Re-parse merge conflict markers in the given buffer snapshot and update
    /// the associated [`ConflictSet`].
    ///
    /// Returns a receiver that resolves once the conflict set has been
    /// updated. If there is no live conflict set, or it currently reports no
    /// conflict, the sender is dropped immediately and the receiver will
    /// yield a cancellation instead.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and compare on the background executor, off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Resolve every waiter that accumulated while re-parsing.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3309
3310 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3311 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3312 }
3313
3314 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3315 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3316 }
3317
3318 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3319 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3320 }
3321
3322 fn handle_base_texts_updated(
3323 &mut self,
3324 buffer: text::BufferSnapshot,
3325 message: proto::UpdateDiffBases,
3326 cx: &mut Context<Self>,
3327 ) {
3328 use proto::update_diff_bases::Mode;
3329
3330 let Some(mode) = Mode::from_i32(message.mode) else {
3331 return;
3332 };
3333
3334 let diff_bases_change = match mode {
3335 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3336 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3337 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3338 Mode::IndexAndHead => DiffBasesChange::SetEach {
3339 index: message.staged_text,
3340 head: message.committed_text,
3341 },
3342 };
3343
3344 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3345 }
3346
3347 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3348 if *self.recalculating_tx.borrow() {
3349 let mut rx = self.recalculating_tx.subscribe();
3350 Some(async move {
3351 loop {
3352 let is_recalculating = rx.recv().await;
3353 if is_recalculating != Some(true) {
3354 break;
3355 }
3356 }
3357 })
3358 } else {
3359 None
3360 }
3361 }
3362
3363 fn diff_bases_changed(
3364 &mut self,
3365 buffer: text::BufferSnapshot,
3366 diff_bases_change: Option<DiffBasesChange>,
3367 cx: &mut Context<Self>,
3368 ) {
3369 match diff_bases_change {
3370 Some(DiffBasesChange::SetIndex(index)) => {
3371 self.index_text = index.map(|mut index| {
3372 text::LineEnding::normalize(&mut index);
3373 Arc::from(index.as_str())
3374 });
3375 self.index_changed = true;
3376 }
3377 Some(DiffBasesChange::SetHead(head)) => {
3378 self.head_text = head.map(|mut head| {
3379 text::LineEnding::normalize(&mut head);
3380 Arc::from(head.as_str())
3381 });
3382 self.head_changed = true;
3383 }
3384 Some(DiffBasesChange::SetBoth(text)) => {
3385 let text = text.map(|mut text| {
3386 text::LineEnding::normalize(&mut text);
3387 Arc::from(text.as_str())
3388 });
3389 self.head_text = text.clone();
3390 self.index_text = text;
3391 self.head_changed = true;
3392 self.index_changed = true;
3393 }
3394 Some(DiffBasesChange::SetEach { index, head }) => {
3395 self.index_text = index.map(|mut index| {
3396 text::LineEnding::normalize(&mut index);
3397 Arc::from(index.as_str())
3398 });
3399 self.index_changed = true;
3400 self.head_text = head.map(|mut head| {
3401 text::LineEnding::normalize(&mut head);
3402 Arc::from(head.as_str())
3403 });
3404 self.head_changed = true;
3405 }
3406 None => {}
3407 }
3408
3409 self.recalculate_diffs(buffer, cx)
3410 }
3411
    /// Recompute all live diffs (unstaged, uncommitted, and per-oid) for this
    /// buffer against the currently stored base texts.
    ///
    /// Flags `recalculating_tx` while the spawned task runs so callers can
    /// await completion via [`Self::wait_for_recalculation`]. The task bails
    /// out early when new hunk staging operations occurred since the last
    /// write, deferring to a later recalculation once the index settles.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality suffices: `diff_bases_changed` stores the same Arc
        // in both fields for `SetBoth`, signalling index == HEAD.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        // Snapshot the still-live oid diffs with their cached base texts.
        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop dead oid diff entries, along with their cached base texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When the index matches HEAD, the uncommitted diff is
                // identical to the unstaged one, so reuse it.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Recompute each still-live oid diff in turn, yielding between them.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and signal completion to any waiters.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
3605}
3606
/// Build an askpass delegate that forwards credential prompts from a local
/// git operation to the downstream client over RPC.
///
/// If there is no downstream client, prompts are silently dropped. The raw
/// response buffer is zeroized after the password has been handed off.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext from memory once it has been handed off.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3636
3637impl RepositoryId {
3638 pub fn to_proto(self) -> u64 {
3639 self.0
3640 }
3641
3642 pub fn from_proto(id: u64) -> Self {
3643 RepositoryId(id)
3644 }
3645}
3646
3647impl RepositorySnapshot {
3648 fn empty(
3649 id: RepositoryId,
3650 work_directory_abs_path: Arc<Path>,
3651 original_repo_abs_path: Option<Arc<Path>>,
3652 path_style: PathStyle,
3653 ) -> Self {
3654 Self {
3655 id,
3656 statuses_by_path: Default::default(),
3657 original_repo_abs_path: original_repo_abs_path
3658 .unwrap_or_else(|| work_directory_abs_path.clone()),
3659 work_directory_abs_path,
3660 branch: None,
3661 branch_list: Arc::from([]),
3662 head_commit: None,
3663 scan_id: 0,
3664 merge: Default::default(),
3665 remote_origin_url: None,
3666 remote_upstream_url: None,
3667 stash_entries: Default::default(),
3668 linked_worktrees: Arc::from([]),
3669 path_style,
3670 }
3671 }
3672
    /// Build the full `UpdateRepository` message used to seed a peer with no
    /// prior state: every status entry is included and nothing is removed.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3714
3715 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3716 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3717 let mut removed_statuses: Vec<String> = Vec::new();
3718
3719 let mut new_statuses = self.statuses_by_path.iter().peekable();
3720 let mut old_statuses = old.statuses_by_path.iter().peekable();
3721
3722 let mut current_new_entry = new_statuses.next();
3723 let mut current_old_entry = old_statuses.next();
3724 loop {
3725 match (current_new_entry, current_old_entry) {
3726 (Some(new_entry), Some(old_entry)) => {
3727 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3728 Ordering::Less => {
3729 updated_statuses.push(new_entry.to_proto());
3730 current_new_entry = new_statuses.next();
3731 }
3732 Ordering::Equal => {
3733 if new_entry.status != old_entry.status
3734 || new_entry.diff_stat != old_entry.diff_stat
3735 {
3736 updated_statuses.push(new_entry.to_proto());
3737 }
3738 current_old_entry = old_statuses.next();
3739 current_new_entry = new_statuses.next();
3740 }
3741 Ordering::Greater => {
3742 removed_statuses.push(old_entry.repo_path.to_proto());
3743 current_old_entry = old_statuses.next();
3744 }
3745 }
3746 }
3747 (None, Some(old_entry)) => {
3748 removed_statuses.push(old_entry.repo_path.to_proto());
3749 current_old_entry = old_statuses.next();
3750 }
3751 (Some(new_entry), None) => {
3752 updated_statuses.push(new_entry.to_proto());
3753 current_new_entry = new_statuses.next();
3754 }
3755 (None, None) => break,
3756 }
3757 }
3758
3759 proto::UpdateRepository {
3760 branch_summary: self.branch.as_ref().map(branch_to_proto),
3761 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3762 updated_statuses,
3763 removed_statuses,
3764 current_merge_conflicts: self
3765 .merge
3766 .merge_heads_by_conflicted_path
3767 .iter()
3768 .map(|(path, _)| path.to_proto())
3769 .collect(),
3770 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3771 project_id,
3772 id: self.id.to_proto(),
3773 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3774 entry_ids: vec![],
3775 scan_id: self.scan_id,
3776 is_last_update: true,
3777 stash_entries: self
3778 .stash_entries
3779 .entries
3780 .iter()
3781 .map(stash_to_proto)
3782 .collect(),
3783 remote_upstream_url: self.remote_upstream_url.clone(),
3784 remote_origin_url: self.remote_origin_url.clone(),
3785 original_repo_abs_path: Some(
3786 self.original_repo_abs_path.to_string_lossy().into_owned(),
3787 ),
3788 linked_worktrees: self
3789 .linked_worktrees
3790 .iter()
3791 .map(worktree_to_proto)
3792 .collect(),
3793 }
3794 }
3795
    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    pub fn is_main_worktree(&self) -> bool {
        self.work_directory_abs_path == self.original_repo_abs_path
    }

    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }

    /// The git worktrees linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }
3818
    /// Iterate over all file status entries, in path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// Aggregate status summary over all entries in this repository.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// The status entry recorded for a single repo-relative path, if any.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// The diff statistics recorded for a repo-relative path, if any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }

    /// Convert an absolute path into a path relative to this repository's
    /// working directory, or `None` if it lies outside the repository.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }
3842
    /// Convert a repo-relative path back into an absolute path under the
    /// working directory.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            // NOTE(review): joining the absolute work directory with a
            // relative repo path is assumed infallible here — confirm
            // `PathStyle::join` semantics.
            .unwrap()
            .into()
    }

    /// Associated-function form of [`Self::abs_path_to_repo_path`], usable
    /// without a snapshot (e.g. from background tasks).
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }

    /// Whether this path was recorded as conflicted at the time the merge
    /// heads last changed (even if the conflict has since been resolved).
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }
3865
3866 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3867 let had_conflict_on_last_merge_head_change = self
3868 .merge
3869 .merge_heads_by_conflicted_path
3870 .contains_key(repo_path);
3871 let has_conflict_currently = self
3872 .status_for_path(repo_path)
3873 .is_some_and(|entry| entry.status.is_conflicted());
3874 had_conflict_on_last_merge_head_change || has_conflict_currently
3875 }
3876
3877 /// This is the name that will be displayed in the repository selector for this repository.
3878 pub fn display_name(&self) -> SharedString {
3879 self.work_directory_abs_path
3880 .file_name()
3881 .unwrap_or_default()
3882 .to_string_lossy()
3883 .to_string()
3884 .into()
3885 }
3886}
3887
3888pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3889 proto::StashEntry {
3890 oid: entry.oid.as_bytes().to_vec(),
3891 message: entry.message.clone(),
3892 branch: entry.branch.clone(),
3893 index: entry.index as u64,
3894 timestamp: entry.timestamp,
3895 }
3896}
3897
3898pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3899 Ok(StashEntry {
3900 oid: Oid::from_bytes(&entry.oid)?,
3901 message: entry.message.clone(),
3902 index: entry.index as usize,
3903 branch: entry.branch.clone(),
3904 timestamp: entry.timestamp,
3905 })
3906}
3907
3908impl MergeDetails {
3909 async fn update(
3910 &mut self,
3911 backend: &Arc<dyn GitRepository>,
3912 current_conflicted_paths: Vec<RepoPath>,
3913 ) -> Result<bool> {
3914 log::debug!("load merge details");
3915 self.message = backend.merge_message().await.map(SharedString::from);
3916 let heads = backend
3917 .revparse_batch(vec![
3918 "MERGE_HEAD".into(),
3919 "CHERRY_PICK_HEAD".into(),
3920 "REBASE_HEAD".into(),
3921 "REVERT_HEAD".into(),
3922 "APPLY_HEAD".into(),
3923 ])
3924 .await
3925 .log_err()
3926 .unwrap_or_default()
3927 .into_iter()
3928 .map(|opt| opt.map(SharedString::from))
3929 .collect::<Vec<_>>();
3930
3931 let mut conflicts_changed = false;
3932
3933 // Record the merge state for newly conflicted paths
3934 for path in ¤t_conflicted_paths {
3935 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3936 conflicts_changed = true;
3937 self.merge_heads_by_conflicted_path
3938 .insert(path.clone(), heads.clone());
3939 }
3940 }
3941
3942 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3943 self.merge_heads_by_conflicted_path
3944 .retain(|path, old_merge_heads| {
3945 let keep = current_conflicted_paths.contains(path)
3946 || (old_merge_heads == &heads
3947 && old_merge_heads.iter().any(|head| head.is_some()));
3948 if !keep {
3949 conflicts_changed = true;
3950 }
3951 keep
3952 });
3953
3954 Ok(conflicts_changed)
3955 }
3956}
3957
3958impl Repository {
3959 pub fn is_trusted(&self) -> bool {
3960 match self.repository_state.peek() {
3961 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3962 _ => false,
3963 }
3964 }
3965
    /// A clone of the current repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }

    /// Iterate over all recorded pending git operations.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }

    /// Aggregate summary over all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }

    /// The pending git operations recorded for a single repo path, if any.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
3983
    /// Construct a repository backed by a local git checkout.
    ///
    /// The backend state loads asynchronously: `repository_state` is a shared
    /// future resolving to [`RepositoryState::Local`] once ready (the error
    /// side is stored as a `String`, since the future must be `Clone` to be
    /// `.shared()`). A dedicated worker drains `job_sender` to run git jobs
    /// against that state.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // todo(git_graph_remote): Make this subscription on both remote/local repo
        // Invalidate cached commit-graph data when the repository shape
        // changes. NOTE(review): scans with scan_id <= 2 are exempt —
        // presumably the initial population; confirm.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 2 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                if this.scan_id > 2 {
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4058
    /// Construct a repository that proxies all git operations to a remote
    /// peer over RPC.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike the local case, the remote state is available immediately.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4096
    /// The owning [`GitStore`], if it is still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4100
    /// Reloads the diff bases (index text and HEAD text) for every open buffer
    /// belonging to this repository, and applies any changes to the matching
    /// diff state, forwarding updated bases to a downstream collaborator when
    /// one is connected.
    ///
    /// Enqueued as a keyed job (`GitJobKey::ReloadBufferDiffBases`) so repeated
    /// requests can be coalesced by the worker. Only local repositories are
    /// supported; for remote state the job logs an error and completes without
    /// doing anything.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                // Diff bases can only be recomputed against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (foreground): for every open buffer that maps into
                // this repository, capture its repo path and the currently
                // cached index/HEAD base texts. A base text is only captured
                // when the corresponding diff is still upgradable (i.e. someone
                // is actually observing it), so we don't load text nobody uses.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Phase 2 (background): load the fresh base texts from git and
                // compare them against the cached ones to decide, per buffer,
                // which bases (index, HEAD, or both) actually changed.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // When index and HEAD agree, send a single
                                            // `SetBoth` so the receiver can share the text.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (foreground): apply each change to its diff state,
                // and mirror it to the downstream client when connected.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4260
    /// Enqueues `job` on this repository's serial git worker queue, without a
    /// deduplication key.
    ///
    /// `status`, when provided, is shown as an in-progress message while the
    /// job runs. The returned receiver yields the job's result, or is canceled
    /// if the worker shuts down before running it. Equivalent to
    /// [`Self::send_keyed_job`] with `key: None`.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4273
    /// Enqueues a job on the repository's serial git worker.
    ///
    /// `key`, when present, identifies the job to the worker so equivalent
    /// queued jobs can be coalesced (the key semantics live in the worker
    /// loop). `status`, when present, is recorded in `active_jobs` for the
    /// duration of the job so the UI can show an in-progress message. The
    /// returned receiver resolves with the job's result, or is canceled if the
    /// worker or the job is dropped first.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Unique per-job id, used to add/remove this job's status entry.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the status message while the job runs.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Clear the status entry whether the job succeeded or failed.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may already be dropped; that's fine.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4323
4324 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4325 let Some(git_store) = self.git_store.upgrade() else {
4326 return;
4327 };
4328 let entity = cx.entity();
4329 git_store.update(cx, |git_store, cx| {
4330 let Some((&id, _)) = git_store
4331 .repositories
4332 .iter()
4333 .find(|(_, handle)| *handle == &entity)
4334 else {
4335 return;
4336 };
4337 git_store.active_repo_id = Some(id);
4338 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4339 });
4340 }
4341
    /// Iterates over the file statuses recorded in the current repository
    /// snapshot; no fresh `git status` is run.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4345
    /// Returns the cached diff stat for `path` from the current snapshot, if
    /// one has been recorded.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4349
    /// Returns a clone of the stash entries recorded in the current snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4353
4354 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4355 let git_store = self.git_store.upgrade()?;
4356 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4357 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4358 let abs_path = SanitizedPath::new(&abs_path);
4359 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4360 Some(ProjectPath {
4361 worktree_id: worktree.read(cx).id(),
4362 path: relative_path,
4363 })
4364 }
4365
4366 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4367 let git_store = self.git_store.upgrade()?;
4368 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4369 let abs_path = worktree_store.absolutize(path, cx)?;
4370 self.snapshot.abs_path_to_repo_path(&abs_path)
4371 }
4372
4373 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4374 other
4375 .read(cx)
4376 .snapshot
4377 .work_directory_abs_path
4378 .starts_with(&self.snapshot.work_directory_abs_path)
4379 }
4380
    /// Returns the shared commit-message buffer for this repository, creating
    /// it on first use (locally, or by requesting it from the remote host).
    ///
    /// The buffer is cached on `self.commit_message_buffer`, so subsequent
    /// calls return the same entity. When `languages` is provided, the buffer
    /// is assigned the "Git Commit" language.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the cached buffer if it was already created.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Ask the host to open (or create) its commit buffer, then
                    // wait for that buffer to be replicated locally.
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        // NOTE(review): the results of this and the following
                        // entity update are discarded; if the entities were
                        // released, the assignment is silently skipped.
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4433
4434 fn open_local_commit_buffer(
4435 language_registry: Option<Arc<LanguageRegistry>>,
4436 buffer_store: Entity<BufferStore>,
4437 cx: &mut Context<Self>,
4438 ) -> Task<Result<Entity<Buffer>>> {
4439 cx.spawn(async move |repository, cx| {
4440 let git_commit_language = match language_registry {
4441 Some(language_registry) => {
4442 Some(language_registry.language_for_name("Git Commit").await?)
4443 }
4444 None => None,
4445 };
4446 let buffer = buffer_store
4447 .update(cx, |buffer_store, cx| {
4448 buffer_store.create_buffer(git_commit_language, false, cx)
4449 })
4450 .await?;
4451
4452 repository.update(cx, |repository, _| {
4453 repository.commit_message_buffer = Some(buffer.clone());
4454 })?;
4455 Ok(buffer)
4456 })
4457 }
4458
    /// Restores the given paths to their state at `commit` (the equivalent of
    /// `git checkout <commit> -- <paths>`), tracking each path as a pending
    /// "reverted" operation while the job runs.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        // Shown in the UI while the checkout is in flight.
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    // Delegate to the host over RPC.
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4513
4514 pub fn reset(
4515 &mut self,
4516 commit: String,
4517 reset_mode: ResetMode,
4518 _cx: &mut App,
4519 ) -> oneshot::Receiver<Result<()>> {
4520 let id = self.id;
4521
4522 self.send_job(None, move |git_repo, _| async move {
4523 match git_repo {
4524 RepositoryState::Local(LocalRepositoryState {
4525 backend,
4526 environment,
4527 ..
4528 }) => backend.reset(commit, reset_mode, environment).await,
4529 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4530 client
4531 .request(proto::GitReset {
4532 project_id: project_id.0,
4533 repository_id: id.to_proto(),
4534 commit,
4535 mode: match reset_mode {
4536 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4537 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4538 },
4539 })
4540 .await?;
4541
4542 Ok(())
4543 }
4544 }
4545 })
4546 }
4547
4548 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4549 let id = self.id;
4550 self.send_job(None, move |git_repo, _cx| async move {
4551 match git_repo {
4552 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4553 backend.show(commit).await
4554 }
4555 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4556 let resp = client
4557 .request(proto::GitShow {
4558 project_id: project_id.0,
4559 repository_id: id.to_proto(),
4560 commit,
4561 })
4562 .await?;
4563
4564 Ok(CommitDetails {
4565 sha: resp.sha.into(),
4566 message: resp.message.into(),
4567 commit_timestamp: resp.commit_timestamp,
4568 author_email: resp.author_email.into(),
4569 author_name: resp.author_name.into(),
4570 })
4571 }
4572 }
4573 })
4574 }
4575
4576 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4577 let id = self.id;
4578 self.send_job(None, move |git_repo, cx| async move {
4579 match git_repo {
4580 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4581 backend.load_commit(commit, cx).await
4582 }
4583 RepositoryState::Remote(RemoteRepositoryState {
4584 client, project_id, ..
4585 }) => {
4586 let response = client
4587 .request(proto::LoadCommitDiff {
4588 project_id: project_id.0,
4589 repository_id: id.to_proto(),
4590 commit,
4591 })
4592 .await?;
4593 Ok(CommitDiff {
4594 files: response
4595 .files
4596 .into_iter()
4597 .map(|file| {
4598 Ok(CommitFile {
4599 path: RepoPath::from_proto(&file.path)?,
4600 old_text: file.old_text,
4601 new_text: file.new_text,
4602 is_binary: file.is_binary,
4603 })
4604 })
4605 .collect::<Result<Vec<_>>>()?,
4606 })
4607 }
4608 }
4609 })
4610 }
4611
    /// Returns the already-fetched graph data for the given (source, order)
    /// combination, or `None` if no fetch has been started for it.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4619
4620 pub fn search_commits(
4621 &mut self,
4622 log_source: LogSource,
4623 search_args: SearchCommitArgs,
4624 request_tx: smol::channel::Sender<Oid>,
4625 cx: &mut Context<Self>,
4626 ) {
4627 let repository_state = self.repository_state.clone();
4628
4629 cx.background_spawn(async move {
4630 let repo_state = repository_state.await;
4631
4632 match repo_state {
4633 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4634 backend
4635 .search_commits(log_source, search_args, request_tx)
4636 .await
4637 .log_err();
4638 }
4639 Ok(RepositoryState::Remote(_)) => {}
4640 Err(_) => {}
4641 };
4642 })
4643 .detach();
4644 }
4645
    /// Returns the commit graph data for `range`, kicking off a background
    /// fetch for the given (source, order) combination the first time it is
    /// requested.
    ///
    /// The response borrows the commits cached so far (clamped to the loaded
    /// length), plus a loading flag and any terminal fetch error.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // First request for this combination: spawn the fetch, and
                // record any terminal error on the cached entry afterwards.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    // The fetch task is owned by the entry, so
                                    // the entry should still exist here.
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to what has been loaded so far.
        // NOTE(review): if a caller passed a range whose clamped start exceeds
        // its clamped end, this slice would panic — assumes start <= end.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4715
    /// Streams the initial commit graph from a local backend into this
    /// repository's `initial_graph_data` cache, emitting a `GraphEvent` count
    /// update for every batch received.
    ///
    /// Returns once the backend finishes (or fails); the receive loop also
    /// ends when the backend closes the channel or the entity is dropped.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // The backend walks the graph on a background thread, sending batches
        // of commit data through `request_tx`.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches on the foreground, appending to the cached entry and
        // keeping the oid -> index lookup in sync.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                // The cache entry (which owns this task) is created before the
                // task starts, so it should always be occupied here.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Surface any backend error once the channel has closed.
        task.await?;
        Ok(())
    }
4769
    /// Returns the cached commit data for `sha`, requesting a background load
    /// when it isn't cached yet.
    ///
    /// When no data is cached, the behavior depends on the handler state:
    /// - `Open`: the sha is sent to the background reader and a `Loading`
    ///   placeholder is cached (only if the send succeeds).
    /// - `Closed`: the handler is (re)started; the request is effectively
    ///   retried on a later call once the handler is open.
    /// - `Starting`: nothing to do, the handler is already being opened.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        // Report `Loading` when no entry exists yet (e.g. the handler was just
        // started, or the request could not be enqueued).
        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4790
    /// Starts the background commit-data reader used by
    /// [`Self::fetch_commit_data`].
    ///
    /// Spawns two cooperating tasks:
    /// - a foreground task that drains loaded commit data into `commit_data`
    ///   and marks the handler `Closed` once the result channel closes;
    /// - a background task that owns the backend reader, serving requested
    ///   shas one at a time and shutting itself down after 10 seconds without
    ///   a request.
    ///
    /// Only supported for local repositories; for remote state the background
    /// task logs an error and exits, which closes the channels and returns the
    /// handler to `Closed`.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Results are bounded (backpressure on the reader); requests are
        // unbounded so `try_send` in `fetch_commit_data` never blocks.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    // The repository entity is gone; stop draining.
                    break;
                }
            }

            // Reader shut down (or entity dropped): allow a later
            // `fetch_commit_data` call to restart the handler.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            // Serve requests until either side hangs up or we've been idle for
            // 10 seconds (the timer is re-armed for each iteration).
            loop {
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task finish and
            // mark the handler `Closed`.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4881
4882 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4883 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4884 }
4885
4886 fn save_buffers<'a>(
4887 &self,
4888 entries: impl IntoIterator<Item = &'a RepoPath>,
4889 cx: &mut Context<Self>,
4890 ) -> Vec<Task<anyhow::Result<()>>> {
4891 let mut save_futures = Vec::new();
4892 if let Some(buffer_store) = self.buffer_store(cx) {
4893 buffer_store.update(cx, |buffer_store, cx| {
4894 for path in entries {
4895 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4896 continue;
4897 };
4898 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4899 && buffer
4900 .read(cx)
4901 .file()
4902 .is_some_and(|file| file.disk_state().exists())
4903 && buffer.read(cx).has_unsaved_edits()
4904 {
4905 save_futures.push(buffer_store.save_buffer(buffer, cx));
4906 }
4907 }
4908 })
4909 }
4910 save_futures
4911 }
4912
    /// Stages the given paths in the index (`git add`), saving any dirty open
    /// buffers for those paths first.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4920
    /// Unstages the given paths (`git reset` on paths), saving any dirty open
    /// buffers for those paths first.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4928
    /// Shared implementation of [`Self::stage_entries`] and
    /// [`Self::unstage_entries`].
    ///
    /// Saves dirty buffers for the affected paths, then performs a keyed index
    /// write (`git add` / `git reset`). Before touching the index it
    /// optimistically stages/unstages all hunks in any open uncommitted diffs
    /// so the UI updates immediately; if the index write later fails, those
    /// pending hunks are cleared again.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable path list for the in-progress status message.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        // Keyed on the written paths so redundant index writes can coalesce.
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Persist unsaved edits before touching the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip the hunk state of every open
                            // uncommitted diff for these paths, and remember
                            // each diff state's operation count so the outcome
                            // can be reconciled below.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile diff states with the outcome: on
                            // success record the write's operation count, on
                            // failure roll back the optimistic pending hunks.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5113
5114 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5115 let snapshot = self.snapshot.clone();
5116 let pending_ops = self.pending_ops.clone();
5117 let to_stage = cx.background_spawn(async move {
5118 snapshot
5119 .status()
5120 .filter_map(|entry| {
5121 if let Some(ops) =
5122 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5123 {
5124 if ops.staging() || ops.staged() {
5125 None
5126 } else {
5127 Some(entry.repo_path)
5128 }
5129 } else if entry.status.staging().is_fully_staged() {
5130 None
5131 } else {
5132 Some(entry.repo_path)
5133 }
5134 })
5135 .collect()
5136 });
5137
5138 cx.spawn(async move |this, cx| {
5139 let to_stage = to_stage.await;
5140 this.update(cx, |this, cx| {
5141 this.stage_or_unstage_entries(true, to_stage, cx)
5142 })?
5143 .await
5144 })
5145 }
5146
5147 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5148 let snapshot = self.snapshot.clone();
5149 let pending_ops = self.pending_ops.clone();
5150 let to_unstage = cx.background_spawn(async move {
5151 snapshot
5152 .status()
5153 .filter_map(|entry| {
5154 if let Some(ops) =
5155 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5156 {
5157 if !ops.staging() && !ops.staged() {
5158 None
5159 } else {
5160 Some(entry.repo_path)
5161 }
5162 } else if entry.status.staging().is_fully_unstaged() {
5163 None
5164 } else {
5165 Some(entry.repo_path)
5166 }
5167 })
5168 .collect()
5169 });
5170
5171 cx.spawn(async move |this, cx| {
5172 let to_unstage = to_unstage.await;
5173 this.update(cx, |this, cx| {
5174 this.stage_or_unstage_entries(false, to_unstage, cx)
5175 })?
5176 .await
5177 })
5178 }
5179
    /// Stashes every path in the cached status, i.e. all files the snapshot
    /// currently reports as changed.
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5185
5186 pub fn stash_entries(
5187 &mut self,
5188 entries: Vec<RepoPath>,
5189 cx: &mut Context<Self>,
5190 ) -> Task<anyhow::Result<()>> {
5191 let id = self.id;
5192
5193 cx.spawn(async move |this, cx| {
5194 this.update(cx, |this, _| {
5195 this.send_job(None, move |git_repo, _cx| async move {
5196 match git_repo {
5197 RepositoryState::Local(LocalRepositoryState {
5198 backend,
5199 environment,
5200 ..
5201 }) => backend.stash_paths(entries, environment).await,
5202 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5203 client
5204 .request(proto::Stash {
5205 project_id: project_id.0,
5206 repository_id: id.to_proto(),
5207 paths: entries
5208 .into_iter()
5209 .map(|repo_path| repo_path.to_proto())
5210 .collect(),
5211 })
5212 .await?;
5213 Ok(())
5214 }
5215 }
5216 })
5217 })?
5218 .await??;
5219 Ok(())
5220 })
5221 }
5222
5223 pub fn stash_pop(
5224 &mut self,
5225 index: Option<usize>,
5226 cx: &mut Context<Self>,
5227 ) -> Task<anyhow::Result<()>> {
5228 let id = self.id;
5229 cx.spawn(async move |this, cx| {
5230 this.update(cx, |this, _| {
5231 this.send_job(None, move |git_repo, _cx| async move {
5232 match git_repo {
5233 RepositoryState::Local(LocalRepositoryState {
5234 backend,
5235 environment,
5236 ..
5237 }) => backend.stash_pop(index, environment).await,
5238 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5239 client
5240 .request(proto::StashPop {
5241 project_id: project_id.0,
5242 repository_id: id.to_proto(),
5243 stash_index: index.map(|i| i as u64),
5244 })
5245 .await
5246 .context("sending stash pop request")?;
5247 Ok(())
5248 }
5249 }
5250 })
5251 })?
5252 .await??;
5253 Ok(())
5254 })
5255 }
5256
5257 pub fn stash_apply(
5258 &mut self,
5259 index: Option<usize>,
5260 cx: &mut Context<Self>,
5261 ) -> Task<anyhow::Result<()>> {
5262 let id = self.id;
5263 cx.spawn(async move |this, cx| {
5264 this.update(cx, |this, _| {
5265 this.send_job(None, move |git_repo, _cx| async move {
5266 match git_repo {
5267 RepositoryState::Local(LocalRepositoryState {
5268 backend,
5269 environment,
5270 ..
5271 }) => backend.stash_apply(index, environment).await,
5272 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5273 client
5274 .request(proto::StashApply {
5275 project_id: project_id.0,
5276 repository_id: id.to_proto(),
5277 stash_index: index.map(|i| i as u64),
5278 })
5279 .await
5280 .context("sending stash apply request")?;
5281 Ok(())
5282 }
5283 }
5284 })
5285 })?
5286 .await??;
5287 Ok(())
5288 })
5289 }
5290
5291 pub fn stash_drop(
5292 &mut self,
5293 index: Option<usize>,
5294 cx: &mut Context<Self>,
5295 ) -> oneshot::Receiver<anyhow::Result<()>> {
5296 let id = self.id;
5297 let updates_tx = self
5298 .git_store()
5299 .and_then(|git_store| match &git_store.read(cx).state {
5300 GitStoreState::Local { downstream, .. } => downstream
5301 .as_ref()
5302 .map(|downstream| downstream.updates_tx.clone()),
5303 _ => None,
5304 });
5305 let this = cx.weak_entity();
5306 self.send_job(None, move |git_repo, mut cx| async move {
5307 match git_repo {
5308 RepositoryState::Local(LocalRepositoryState {
5309 backend,
5310 environment,
5311 ..
5312 }) => {
5313 // TODO would be nice to not have to do this manually
5314 let result = backend.stash_drop(index, environment).await;
5315 if result.is_ok()
5316 && let Ok(stash_entries) = backend.stash_entries().await
5317 {
5318 let snapshot = this.update(&mut cx, |this, cx| {
5319 this.snapshot.stash_entries = stash_entries;
5320 cx.emit(RepositoryEvent::StashEntriesChanged);
5321 this.snapshot.clone()
5322 })?;
5323 if let Some(updates_tx) = updates_tx {
5324 updates_tx
5325 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5326 .ok();
5327 }
5328 }
5329
5330 result
5331 }
5332 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5333 client
5334 .request(proto::StashDrop {
5335 project_id: project_id.0,
5336 repository_id: id.to_proto(),
5337 stash_index: index.map(|i| i as u64),
5338 })
5339 .await
5340 .context("sending stash pop request")?;
5341 Ok(())
5342 }
5343 }
5344 })
5345 }
5346
5347 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5348 let id = self.id;
5349 self.send_job(
5350 Some(format!("git hook {}", hook.as_str()).into()),
5351 move |git_repo, _cx| async move {
5352 match git_repo {
5353 RepositoryState::Local(LocalRepositoryState {
5354 backend,
5355 environment,
5356 ..
5357 }) => backend.run_hook(hook, environment.clone()).await,
5358 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5359 client
5360 .request(proto::RunGitHook {
5361 project_id: project_id.0,
5362 repository_id: id.to_proto(),
5363 hook: hook.to_proto(),
5364 })
5365 .await?;
5366
5367 Ok(())
5368 }
5369 }
5370 },
5371 )
5372 }
5373
    /// Creates a commit from the currently staged changes (`git commit`).
    ///
    /// Runs the pre-commit hook first and aborts the commit if it fails.
    /// `name_and_email` overrides the author identity when provided;
    /// `options` carries the amend/signoff/allow-empty flags. `askpass`
    /// answers credential or signing prompts; for remote repositories it is
    /// registered in `askpass_delegates` under a fresh id so prompts raised on
    /// the host can be proxied back to this client.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Kick off the pre-commit hook now; the commit job below waits on it.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Outer `?`: hook job was dropped/canceled; inner `?`: hook failed.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always unregister the delegate when the request finishes,
                    // success or failure.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5429
    /// Fetches from a remote (`git fetch`).
    ///
    /// `fetch_options` selects what to fetch; `askpass` answers any credential
    /// prompts. For remote repositories the delegate is registered in
    /// `askpass_delegates` under a fresh id so prompts raised on the host can
    /// be proxied back to this client.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always unregister the delegate when the request finishes,
                    // success or failure.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5471
5472 pub fn push(
5473 &mut self,
5474 branch: SharedString,
5475 remote_branch: SharedString,
5476 remote: SharedString,
5477 options: Option<PushOptions>,
5478 askpass: AskPassDelegate,
5479 cx: &mut Context<Self>,
5480 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5481 let askpass_delegates = self.askpass_delegates.clone();
5482 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5483 let id = self.id;
5484
5485 let args = options
5486 .map(|option| match option {
5487 PushOptions::SetUpstream => " --set-upstream",
5488 PushOptions::Force => " --force-with-lease",
5489 })
5490 .unwrap_or("");
5491
5492 let updates_tx = self
5493 .git_store()
5494 .and_then(|git_store| match &git_store.read(cx).state {
5495 GitStoreState::Local { downstream, .. } => downstream
5496 .as_ref()
5497 .map(|downstream| downstream.updates_tx.clone()),
5498 _ => None,
5499 });
5500
5501 let this = cx.weak_entity();
5502 self.send_job(
5503 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5504 move |git_repo, mut cx| async move {
5505 match git_repo {
5506 RepositoryState::Local(LocalRepositoryState {
5507 backend,
5508 environment,
5509 ..
5510 }) => {
5511 let result = backend
5512 .push(
5513 branch.to_string(),
5514 remote_branch.to_string(),
5515 remote.to_string(),
5516 options,
5517 askpass,
5518 environment.clone(),
5519 cx.clone(),
5520 )
5521 .await;
5522 // TODO would be nice to not have to do this manually
5523 if result.is_ok() {
5524 let branches = backend.branches().await?;
5525 let branch = branches.into_iter().find(|branch| branch.is_head);
5526 log::info!("head branch after scan is {branch:?}");
5527 let snapshot = this.update(&mut cx, |this, cx| {
5528 this.snapshot.branch = branch;
5529 cx.emit(RepositoryEvent::HeadChanged);
5530 this.snapshot.clone()
5531 })?;
5532 if let Some(updates_tx) = updates_tx {
5533 updates_tx
5534 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5535 .ok();
5536 }
5537 }
5538 result
5539 }
5540 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5541 askpass_delegates.lock().insert(askpass_id, askpass);
5542 let _defer = util::defer(|| {
5543 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5544 debug_assert!(askpass_delegate.is_some());
5545 });
5546 let response = client
5547 .request(proto::Push {
5548 project_id: project_id.0,
5549 repository_id: id.to_proto(),
5550 askpass_id,
5551 branch_name: branch.to_string(),
5552 remote_branch_name: remote_branch.to_string(),
5553 remote_name: remote.to_string(),
5554 options: options.map(|options| match options {
5555 PushOptions::Force => proto::push::PushOptions::Force,
5556 PushOptions::SetUpstream => {
5557 proto::push::PushOptions::SetUpstream
5558 }
5559 }
5560 as i32),
5561 })
5562 .await?;
5563
5564 Ok(RemoteCommandOutput {
5565 stdout: response.stdout,
5566 stderr: response.stderr,
5567 })
5568 }
5569 }
5570 },
5571 )
5572 }
5573
    /// Pulls from a remote (`git pull`), optionally with `--rebase` and an
    /// explicit branch.
    ///
    /// The status string shown while the job runs mirrors the equivalent git
    /// command line. `askpass` answers credential prompts; for remote
    /// repositories it is registered in `askpass_delegates` so prompts raised
    /// on the host can be proxied back to this client.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build the human-readable status, e.g. "git pull --rebase origin main".
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always unregister the delegate when the request finishes.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5638
    /// Queues a job that overwrites the index (staged) contents of `path`.
    ///
    /// `content` is the new staged text; `None` presumably removes/unstages
    /// the entry — confirm against the backend's `set_index_text`. Jobs are
    /// keyed by `GitJobKey::WriteIndex(path)` so writes to the same path are
    /// serialized. After the write, `hunk_staging_operation_count` (when
    /// provided) is recorded on the corresponding buffer's diff state so later
    /// diff recalculation can tell which hunk-staging operations this index
    /// write already reflects.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the working file's executable bit; missing
                        // files or metadata errors default to non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Map the repo path back to an open buffer (if any) and
                    // stamp its diff state with the operation count this write
                    // corresponds to.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5717
5718 pub fn create_remote(
5719 &mut self,
5720 remote_name: String,
5721 remote_url: String,
5722 ) -> oneshot::Receiver<Result<()>> {
5723 let id = self.id;
5724 self.send_job(
5725 Some(format!("git remote add {remote_name} {remote_url}").into()),
5726 move |repo, _cx| async move {
5727 match repo {
5728 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5729 backend.create_remote(remote_name, remote_url).await
5730 }
5731 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5732 client
5733 .request(proto::GitCreateRemote {
5734 project_id: project_id.0,
5735 repository_id: id.to_proto(),
5736 remote_name,
5737 remote_url,
5738 })
5739 .await?;
5740
5741 Ok(())
5742 }
5743 }
5744 },
5745 )
5746 }
5747
5748 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5749 let id = self.id;
5750 self.send_job(
5751 Some(format!("git remove remote {remote_name}").into()),
5752 move |repo, _cx| async move {
5753 match repo {
5754 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5755 backend.remove_remote(remote_name).await
5756 }
5757 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5758 client
5759 .request(proto::GitRemoveRemote {
5760 project_id: project_id.0,
5761 repository_id: id.to_proto(),
5762 remote_name,
5763 })
5764 .await?;
5765
5766 Ok(())
5767 }
5768 }
5769 },
5770 )
5771 }
5772
5773 pub fn get_remotes(
5774 &mut self,
5775 branch_name: Option<String>,
5776 is_push: bool,
5777 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5778 let id = self.id;
5779 self.send_job(None, move |repo, _cx| async move {
5780 match repo {
5781 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5782 let remote = if let Some(branch_name) = branch_name {
5783 if is_push {
5784 backend.get_push_remote(branch_name).await?
5785 } else {
5786 backend.get_branch_remote(branch_name).await?
5787 }
5788 } else {
5789 None
5790 };
5791
5792 match remote {
5793 Some(remote) => Ok(vec![remote]),
5794 None => backend.get_all_remotes().await,
5795 }
5796 }
5797 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5798 let response = client
5799 .request(proto::GetRemotes {
5800 project_id: project_id.0,
5801 repository_id: id.to_proto(),
5802 branch_name,
5803 is_push,
5804 })
5805 .await?;
5806
5807 let remotes = response
5808 .remotes
5809 .into_iter()
5810 .map(|remotes| Remote {
5811 name: remotes.name.into(),
5812 })
5813 .collect();
5814
5815 Ok(remotes)
5816 }
5817 }
5818 })
5819 }
5820
5821 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5822 let id = self.id;
5823 self.send_job(None, move |repo, _| async move {
5824 match repo {
5825 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5826 backend.branches().await
5827 }
5828 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5829 let response = client
5830 .request(proto::GitGetBranches {
5831 project_id: project_id.0,
5832 repository_id: id.to_proto(),
5833 })
5834 .await?;
5835
5836 let branches = response
5837 .branches
5838 .into_iter()
5839 .map(|branch| proto_to_branch(&branch))
5840 .collect();
5841
5842 Ok(branches)
5843 }
5844 }
5845 })
5846 }
5847
5848 /// If this is a linked worktree (*NOT* the main checkout of a repository),
5849 /// returns the pathed for the linked worktree.
5850 ///
5851 /// Returns None if this is the main checkout.
5852 pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
5853 if self.work_directory_abs_path != self.original_repo_abs_path {
5854 Some(&self.work_directory_abs_path)
5855 } else {
5856 None
5857 }
5858 }
5859
5860 pub fn path_for_new_linked_worktree(
5861 &self,
5862 branch_name: &str,
5863 worktree_directory_setting: &str,
5864 ) -> Result<PathBuf> {
5865 let original_repo = self.original_repo_abs_path.clone();
5866 let project_name = original_repo
5867 .file_name()
5868 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5869 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5870 Ok(directory.join(branch_name).join(project_name))
5871 }
5872
5873 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5874 let id = self.id;
5875 self.send_job(None, move |repo, _| async move {
5876 match repo {
5877 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5878 backend.worktrees().await
5879 }
5880 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5881 let response = client
5882 .request(proto::GitGetWorktrees {
5883 project_id: project_id.0,
5884 repository_id: id.to_proto(),
5885 })
5886 .await?;
5887
5888 let worktrees = response
5889 .worktrees
5890 .into_iter()
5891 .map(|worktree| proto_to_worktree(&worktree))
5892 .collect();
5893
5894 Ok(worktrees)
5895 }
5896 }
5897 })
5898 }
5899
    /// Creates a new linked git worktree at `path`, either on a fresh branch
    /// (`Branched`) or with a detached HEAD (`Detached`), optionally starting
    /// at `commit`.
    ///
    /// Empty branch names are rejected up front without queueing a job.
    fn create_worktree_with_start_point(
        &mut self,
        start_point: CreateWorktreeStartPoint,
        path: PathBuf,
        commit: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        // Fail fast on an empty branch name, replying through a pre-completed
        // channel so the return type stays uniform with the job-based path.
        if matches!(
            &start_point,
            CreateWorktreeStartPoint::Branched { name } if name.is_empty()
        ) {
            let (sender, receiver) = oneshot::channel();
            sender
                .send(Err(anyhow!("branch name cannot be empty")))
                .ok();
            return receiver;
        }

        let id = self.id;
        let message = match &start_point {
            CreateWorktreeStartPoint::Detached => "git worktree add (detached)".into(),
            CreateWorktreeStartPoint::Branched { name } => {
                format!("git worktree add: {name}").into()
            }
        };

        self.send_job(Some(message), move |repo, _cx| async move {
            let branch_name = match start_point {
                CreateWorktreeStartPoint::Detached => None,
                CreateWorktreeStartPoint::Branched { name } => Some(name),
            };
            // The proto request has no optional branch field, so a detached
            // worktree is encoded as an empty name. NOTE(review): confirm the
            // host side treats "" as detached.
            let remote_name = branch_name.clone().unwrap_or_default();

            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_worktree(branch_name, path, commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateWorktree {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            name: remote_name,
                            directory: path.to_string_lossy().to_string(),
                            commit,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5952
5953 pub fn create_worktree(
5954 &mut self,
5955 branch_name: String,
5956 path: PathBuf,
5957 commit: Option<String>,
5958 ) -> oneshot::Receiver<Result<()>> {
5959 self.create_worktree_with_start_point(
5960 CreateWorktreeStartPoint::Branched { name: branch_name },
5961 path,
5962 commit,
5963 )
5964 }
5965
5966 pub fn create_worktree_detached(
5967 &mut self,
5968 path: PathBuf,
5969 commit: String,
5970 ) -> oneshot::Receiver<Result<()>> {
5971 self.create_worktree_with_start_point(
5972 CreateWorktreeStartPoint::Detached,
5973 path,
5974 Some(commit),
5975 )
5976 }
5977
5978 pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
5979 let id = self.id;
5980 self.send_job(None, move |repo, _cx| async move {
5981 match repo {
5982 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5983 Ok(backend.head_sha().await)
5984 }
5985 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5986 let response = client
5987 .request(proto::GitGetHeadSha {
5988 project_id: project_id.0,
5989 repository_id: id.to_proto(),
5990 })
5991 .await?;
5992
5993 Ok(response.sha)
5994 }
5995 }
5996 })
5997 }
5998
5999 pub fn update_ref(
6000 &mut self,
6001 ref_name: String,
6002 commit: String,
6003 ) -> oneshot::Receiver<Result<()>> {
6004 self.send_job(None, move |repo, _cx| async move {
6005 match repo {
6006 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6007 backend.update_ref(ref_name, commit).await
6008 }
6009 RepositoryState::Remote(_) => {
6010 anyhow::bail!("update_ref is not supported for remote repositories")
6011 }
6012 }
6013 })
6014 }
6015
6016 pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
6017 self.send_job(None, move |repo, _cx| async move {
6018 match repo {
6019 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6020 backend.delete_ref(ref_name).await
6021 }
6022 RepositoryState::Remote(_) => {
6023 anyhow::bail!("delete_ref is not supported for remote repositories")
6024 }
6025 }
6026 })
6027 }
6028
6029 pub fn resolve_commit(&mut self, sha: String) -> oneshot::Receiver<Result<bool>> {
6030 self.send_job(None, move |repo, _cx| async move {
6031 match repo {
6032 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6033 let results = backend.revparse_batch(vec![sha]).await?;
6034 Ok(results.into_iter().next().flatten().is_some())
6035 }
6036 RepositoryState::Remote(_) => {
6037 anyhow::bail!("resolve_commit is not supported for remote repositories")
6038 }
6039 }
6040 })
6041 }
6042
6043 pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
6044 self.send_job(None, move |repo, _cx| async move {
6045 match repo {
6046 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6047 backend.repair_worktrees().await
6048 }
6049 RepositoryState::Remote(_) => {
6050 anyhow::bail!("repair_worktrees is not supported for remote repositories")
6051 }
6052 }
6053 })
6054 }
6055
6056 pub fn commit_exists(&mut self, sha: String) -> oneshot::Receiver<Result<bool>> {
6057 self.send_job(None, move |repo, _cx| async move {
6058 match repo {
6059 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6060 let results = backend.revparse_batch(vec![sha]).await?;
6061 Ok(results.into_iter().next().flatten().is_some())
6062 }
6063 RepositoryState::Remote(_) => {
6064 anyhow::bail!("commit_exists is not supported for remote repositories")
6065 }
6066 }
6067 })
6068 }
6069
6070 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
6071 let id = self.id;
6072 self.send_job(
6073 Some(format!("git worktree remove: {}", path.display()).into()),
6074 move |repo, _cx| async move {
6075 match repo {
6076 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6077 backend.remove_worktree(path, force).await
6078 }
6079 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6080 client
6081 .request(proto::GitRemoveWorktree {
6082 project_id: project_id.0,
6083 repository_id: id.to_proto(),
6084 path: path.to_string_lossy().to_string(),
6085 force,
6086 })
6087 .await?;
6088
6089 Ok(())
6090 }
6091 }
6092 },
6093 )
6094 }
6095
6096 pub fn rename_worktree(
6097 &mut self,
6098 old_path: PathBuf,
6099 new_path: PathBuf,
6100 ) -> oneshot::Receiver<Result<()>> {
6101 let id = self.id;
6102 self.send_job(
6103 Some(format!("git worktree move: {}", old_path.display()).into()),
6104 move |repo, _cx| async move {
6105 match repo {
6106 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6107 backend.rename_worktree(old_path, new_path).await
6108 }
6109 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6110 client
6111 .request(proto::GitRenameWorktree {
6112 project_id: project_id.0,
6113 repository_id: id.to_proto(),
6114 old_path: old_path.to_string_lossy().to_string(),
6115 new_path: new_path.to_string_lossy().to_string(),
6116 })
6117 .await?;
6118
6119 Ok(())
6120 }
6121 }
6122 },
6123 )
6124 }
6125
6126 pub fn default_branch(
6127 &mut self,
6128 include_remote_name: bool,
6129 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
6130 let id = self.id;
6131 self.send_job(None, move |repo, _| async move {
6132 match repo {
6133 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6134 backend.default_branch(include_remote_name).await
6135 }
6136 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6137 let response = client
6138 .request(proto::GetDefaultBranch {
6139 project_id: project_id.0,
6140 repository_id: id.to_proto(),
6141 })
6142 .await?;
6143
6144 anyhow::Ok(response.branch.map(SharedString::from))
6145 }
6146 }
6147 })
6148 }
6149
    /// Computes a tree-level diff (per-file statuses, no hunks) for
    /// `diff_type`.
    ///
    /// For remote repositories this issues a `GetTreeDiff` request and
    /// rebuilds the `TreeDiff` from the proto response; entries whose oid or
    /// path fail to parse are logged and skipped (via `log_err` inside
    /// `filter_map`).
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Malformed entries (bad oid / undecodable path) are
                    // dropped rather than failing the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6209
6210 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6211 let id = self.id;
6212 self.send_job(None, move |repo, _cx| async move {
6213 match repo {
6214 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6215 backend.diff(diff_type).await
6216 }
6217 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6218 let (proto_diff_type, merge_base_ref) = match &diff_type {
6219 DiffType::HeadToIndex => {
6220 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6221 }
6222 DiffType::HeadToWorktree => {
6223 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6224 }
6225 DiffType::MergeBase { base_ref } => (
6226 proto::git_diff::DiffType::MergeBase.into(),
6227 Some(base_ref.to_string()),
6228 ),
6229 };
6230 let response = client
6231 .request(proto::GitDiff {
6232 project_id: project_id.0,
6233 repository_id: id.to_proto(),
6234 diff_type: proto_diff_type,
6235 merge_base_ref,
6236 })
6237 .await?;
6238
6239 Ok(response.diff)
6240 }
6241 }
6242 })
6243 }
6244
6245 pub fn create_branch(
6246 &mut self,
6247 branch_name: String,
6248 base_branch: Option<String>,
6249 ) -> oneshot::Receiver<Result<()>> {
6250 let id = self.id;
6251 let status_msg = if let Some(ref base) = base_branch {
6252 format!("git switch -c {branch_name} {base}").into()
6253 } else {
6254 format!("git switch -c {branch_name}").into()
6255 };
6256 self.send_job(Some(status_msg), move |repo, _cx| async move {
6257 match repo {
6258 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6259 backend.create_branch(branch_name, base_branch).await
6260 }
6261 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6262 client
6263 .request(proto::GitCreateBranch {
6264 project_id: project_id.0,
6265 repository_id: id.to_proto(),
6266 branch_name,
6267 })
6268 .await?;
6269
6270 Ok(())
6271 }
6272 }
6273 })
6274 }
6275
6276 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6277 let id = self.id;
6278 self.send_job(
6279 Some(format!("git switch {branch_name}").into()),
6280 move |repo, _cx| async move {
6281 match repo {
6282 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6283 backend.change_branch(branch_name).await
6284 }
6285 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6286 client
6287 .request(proto::GitChangeBranch {
6288 project_id: project_id.0,
6289 repository_id: id.to_proto(),
6290 branch_name,
6291 })
6292 .await?;
6293
6294 Ok(())
6295 }
6296 }
6297 },
6298 )
6299 }
6300
6301 pub fn delete_branch(
6302 &mut self,
6303 is_remote: bool,
6304 branch_name: String,
6305 ) -> oneshot::Receiver<Result<()>> {
6306 let id = self.id;
6307 self.send_job(
6308 Some(
6309 format!(
6310 "git branch {} {}",
6311 if is_remote { "-dr" } else { "-d" },
6312 branch_name
6313 )
6314 .into(),
6315 ),
6316 move |repo, _cx| async move {
6317 match repo {
6318 RepositoryState::Local(state) => {
6319 state.backend.delete_branch(is_remote, branch_name).await
6320 }
6321 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6322 client
6323 .request(proto::GitDeleteBranch {
6324 project_id: project_id.0,
6325 repository_id: id.to_proto(),
6326 is_remote,
6327 branch_name,
6328 })
6329 .await?;
6330
6331 Ok(())
6332 }
6333 }
6334 },
6335 )
6336 }
6337
6338 pub fn rename_branch(
6339 &mut self,
6340 branch: String,
6341 new_name: String,
6342 ) -> oneshot::Receiver<Result<()>> {
6343 let id = self.id;
6344 self.send_job(
6345 Some(format!("git branch -m {branch} {new_name}").into()),
6346 move |repo, _cx| async move {
6347 match repo {
6348 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6349 backend.rename_branch(branch, new_name).await
6350 }
6351 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6352 client
6353 .request(proto::GitRenameBranch {
6354 project_id: project_id.0,
6355 repository_id: id.to_proto(),
6356 branch,
6357 new_name,
6358 })
6359 .await?;
6360
6361 Ok(())
6362 }
6363 }
6364 },
6365 )
6366 }
6367
6368 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6369 let id = self.id;
6370 self.send_job(None, move |repo, _cx| async move {
6371 match repo {
6372 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6373 backend.check_for_pushed_commit().await
6374 }
6375 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6376 let response = client
6377 .request(proto::CheckForPushedCommits {
6378 project_id: project_id.0,
6379 repository_id: id.to_proto(),
6380 })
6381 .await?;
6382
6383 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6384
6385 Ok(branches)
6386 }
6387 }
6388 })
6389 }
6390
6391 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6392 let id = self.id;
6393 self.send_job(None, move |repo, _cx| async move {
6394 match repo {
6395 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6396 backend.checkpoint().await
6397 }
6398 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6399 let response = client
6400 .request(proto::GitCreateCheckpoint {
6401 project_id: project_id.0,
6402 repository_id: id.to_proto(),
6403 })
6404 .await?;
6405
6406 Ok(GitRepositoryCheckpoint {
6407 commit_sha: Oid::from_bytes(&response.commit_sha)?,
6408 })
6409 }
6410 }
6411 })
6412 }
6413
6414 pub fn restore_checkpoint(
6415 &mut self,
6416 checkpoint: GitRepositoryCheckpoint,
6417 ) -> oneshot::Receiver<Result<()>> {
6418 let id = self.id;
6419 self.send_job(None, move |repo, _cx| async move {
6420 match repo {
6421 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6422 backend.restore_checkpoint(checkpoint).await
6423 }
6424 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6425 client
6426 .request(proto::GitRestoreCheckpoint {
6427 project_id: project_id.0,
6428 repository_id: id.to_proto(),
6429 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6430 })
6431 .await?;
6432 Ok(())
6433 }
6434 }
6435 })
6436 }
6437
    /// Applies an `UpdateRepository` message received from the upstream (host)
    /// project to this downstream replica's snapshot, emitting the matching
    /// `RepositoryEvent`s for each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        // Compare before overwriting so listeners are only notified on real changes.
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to deserialize are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Build one batched edit list: removals first, then inserts/updates.
        // Paths or statuses that fail to decode are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only advance the scan id on the final message of a batch, so readers
        // never observe a half-applied scan.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6521
6522 pub fn compare_checkpoints(
6523 &mut self,
6524 left: GitRepositoryCheckpoint,
6525 right: GitRepositoryCheckpoint,
6526 ) -> oneshot::Receiver<Result<bool>> {
6527 let id = self.id;
6528 self.send_job(None, move |repo, _cx| async move {
6529 match repo {
6530 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6531 backend.compare_checkpoints(left, right).await
6532 }
6533 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6534 let response = client
6535 .request(proto::GitCompareCheckpoints {
6536 project_id: project_id.0,
6537 repository_id: id.to_proto(),
6538 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6539 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6540 })
6541 .await?;
6542 Ok(response.equal)
6543 }
6544 }
6545 })
6546 }
6547
6548 pub fn diff_checkpoints(
6549 &mut self,
6550 base_checkpoint: GitRepositoryCheckpoint,
6551 target_checkpoint: GitRepositoryCheckpoint,
6552 ) -> oneshot::Receiver<Result<String>> {
6553 let id = self.id;
6554 self.send_job(None, move |repo, _cx| async move {
6555 match repo {
6556 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6557 backend
6558 .diff_checkpoints(base_checkpoint, target_checkpoint)
6559 .await
6560 }
6561 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6562 let response = client
6563 .request(proto::GitDiffCheckpoints {
6564 project_id: project_id.0,
6565 repository_id: id.to_proto(),
6566 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
6567 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
6568 })
6569 .await?;
6570 Ok(response.diff)
6571 }
6572 }
6573 })
6574 }
6575
    /// Rebuilds the pending-ops tree keeping only operations still running,
    /// dropping finished/skipped/errored ones, and emits `PendingOpsChanged`
    /// if anything was removed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Retain only in-flight ops; a path whose ops have all completed is
        // dropped from the tree entirely.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event payload is the *previous* pending-ops
            // tree (cloned before the assignment below). Confirm whether
            // listeners expect the pre-clear or post-clear state here.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6601
6602 fn schedule_scan(
6603 &mut self,
6604 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
6605 cx: &mut Context<Self>,
6606 ) {
6607 let this = cx.weak_entity();
6608 let _ = self.send_keyed_job(
6609 Some(GitJobKey::ReloadGitState),
6610 None,
6611 |state, mut cx| async move {
6612 log::debug!("run scheduled git status scan");
6613
6614 let Some(this) = this.upgrade() else {
6615 return Ok(());
6616 };
6617 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
6618 bail!("not a local repository")
6619 };
6620 let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
6621 this.update(&mut cx, |this, cx| {
6622 this.clear_pending_ops(cx);
6623 });
6624 if let Some(updates_tx) = updates_tx {
6625 updates_tx
6626 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
6627 .ok();
6628 }
6629 Ok(())
6630 },
6631 );
6632 }
6633
    /// Spawns the background worker loop that executes git jobs for a local
    /// repository, returning the channel used to enqueue jobs.
    ///
    /// Jobs run one at a time in FIFO order. Before running a keyed job, the
    /// channel is drained and the job is skipped when a newer job with the
    /// same key is already waiting, so only the latest of a run of identical
    /// keyed jobs (e.g. status reloads) actually executes.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // processing any jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so the keyed-job
                // deduplication below can see all pending work.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job when a later job with the same key is
                    // queued behind it; the later job supersedes this one.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6679
    /// Spawns the background worker loop that executes git jobs for a remote
    /// (RPC-backed) repository, returning the channel used to enqueue jobs.
    ///
    /// Mirrors `spawn_local_git_worker`'s loop: FIFO execution with keyed-job
    /// deduplication, but without any local backend initialization.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so the keyed-job
                // deduplication below can see all pending work.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job when a later job with the same key is
                    // queued behind it; the later job supersedes this one.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6715
6716 fn load_staged_text(
6717 &mut self,
6718 buffer_id: BufferId,
6719 repo_path: RepoPath,
6720 cx: &App,
6721 ) -> Task<Result<Option<String>>> {
6722 let rx = self.send_job(None, move |state, _| async move {
6723 match state {
6724 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6725 anyhow::Ok(backend.load_index_text(repo_path).await)
6726 }
6727 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6728 let response = client
6729 .request(proto::OpenUnstagedDiff {
6730 project_id: project_id.to_proto(),
6731 buffer_id: buffer_id.to_proto(),
6732 })
6733 .await?;
6734 Ok(response.staged_text)
6735 }
6736 }
6737 });
6738 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6739 }
6740
6741 fn load_committed_text(
6742 &mut self,
6743 buffer_id: BufferId,
6744 repo_path: RepoPath,
6745 cx: &App,
6746 ) -> Task<Result<DiffBasesChange>> {
6747 let rx = self.send_job(None, move |state, _| async move {
6748 match state {
6749 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6750 let committed_text = backend.load_committed_text(repo_path.clone()).await;
6751 let staged_text = backend.load_index_text(repo_path).await;
6752 let diff_bases_change = if committed_text == staged_text {
6753 DiffBasesChange::SetBoth(committed_text)
6754 } else {
6755 DiffBasesChange::SetEach {
6756 index: staged_text,
6757 head: committed_text,
6758 }
6759 };
6760 anyhow::Ok(diff_bases_change)
6761 }
6762 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6763 use proto::open_uncommitted_diff_response::Mode;
6764
6765 let response = client
6766 .request(proto::OpenUncommittedDiff {
6767 project_id: project_id.to_proto(),
6768 buffer_id: buffer_id.to_proto(),
6769 })
6770 .await?;
6771 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
6772 let bases = match mode {
6773 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
6774 Mode::IndexAndHead => DiffBasesChange::SetEach {
6775 head: response.committed_text,
6776 index: response.staged_text,
6777 },
6778 };
6779 Ok(bases)
6780 }
6781 }
6782 });
6783
6784 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6785 }
6786
6787 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6788 let repository_id = self.snapshot.id;
6789 let rx = self.send_job(None, move |state, _| async move {
6790 match state {
6791 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6792 backend.load_blob_content(oid).await
6793 }
6794 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6795 let response = client
6796 .request(proto::GetBlobContent {
6797 project_id: project_id.to_proto(),
6798 repository_id: repository_id.0,
6799 oid: oid.to_string(),
6800 })
6801 .await?;
6802 Ok(response.content)
6803 }
6804 }
6805 });
6806 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6807 }
6808
    /// Queues an incremental status refresh for `paths`, batching them with
    /// any paths already awaiting a refresh.
    ///
    /// The actual work runs as a keyed job (`GitJobKey::RefreshStatuses`), so
    /// bursts of filesystem events collapse into a single refresh. Changed
    /// statuses are diffed against the previous snapshot and only real
    /// differences are applied and broadcast downstream.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of the accumulated paths; a concurrent caller
                // can start accumulating a fresh batch immediately.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten the batched path lists and dedupe via BTreeSet
                        // (also gives a stable, sorted order for the cursor scan).
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        // Without a HEAD commit there is nothing to diff against,
                        // so substitute an empty diff-stat result.
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // First pass: upsert entries whose status or diff stat
                        // differs from the previous snapshot. Relies on statuses
                        // being sorted so the cursor only moves forward.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Second pass: paths we were asked about but which git no
                        // longer reports a status for must be removed from the tree.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6921
6922 /// currently running git command and when it started
6923 pub fn current_job(&self) -> Option<JobInfo> {
6924 self.active_jobs.values().next().cloned()
6925 }
6926
6927 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
6928 self.send_job(None, |_, _| async {})
6929 }
6930
    /// Runs `f` while tracking a `Running` pending op for each of `paths`,
    /// then records the outcome (`Finished`, `Skipped` on cancellation, or
    /// `Error`) back onto those ops.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A `Canceled` error means the underlying job was dropped (e.g.
            // superseded by a newer one); treat that as a skip, not a failure.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    // The ops for a path may have been cleared while the job
                    // ran; in that case there is nothing left to record.
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6970
6971 fn new_pending_ops_for_paths(
6972 &mut self,
6973 paths: Vec<RepoPath>,
6974 git_status: pending_op::GitStatus,
6975 ) -> Vec<(PendingOpId, RepoPath)> {
6976 let mut edits = Vec::with_capacity(paths.len());
6977 let mut ids = Vec::with_capacity(paths.len());
6978 for path in paths {
6979 let mut ops = self
6980 .pending_ops
6981 .get(&PathKey(path.as_ref().clone()), ())
6982 .cloned()
6983 .unwrap_or_else(|| PendingOps::new(&path));
6984 let id = ops.max_id() + 1;
6985 ops.ops.push(PendingOp {
6986 id,
6987 git_status,
6988 job_status: pending_op::JobStatus::Running,
6989 });
6990 edits.push(sum_tree::Edit::Insert(ops));
6991 ids.push((id, path));
6992 }
6993 self.pending_ops.edit(edits, ());
6994 ids
6995 }
6996 pub fn default_remote_url(&self) -> Option<String> {
6997 self.remote_upstream_url
6998 .clone()
6999 .or(self.remote_origin_url.clone())
7000 }
7001}
7002
7003/// If `path` is a git linked worktree checkout, resolves it to the main
7004/// repository's working directory path. Returns `None` if `path` is a normal
7005/// repository, not a git repo, or if resolution fails.
7006///
7007/// Resolution works by:
7008/// 1. Reading the `.git` file to get the `gitdir:` pointer
7009/// 2. Following that to the worktree-specific git directory
7010/// 3. Reading the `commondir` file to find the shared `.git` directory
7011/// 4. Deriving the main repo's working directory from the common dir
7012pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7013 let dot_git = path.join(".git");
7014 let metadata = fs.metadata(&dot_git).await.ok()??;
7015 if metadata.is_dir {
7016 return None; // Normal repo, not a linked worktree
7017 }
7018 // It's a .git file — parse the gitdir: pointer
7019 let content = fs.load(&dot_git).await.ok()?;
7020 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7021 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7022 // Read commondir to find the main .git directory
7023 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7024 let common_dir = fs
7025 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7026 .await
7027 .ok()?;
7028 Some(git::repository::original_repo_path_from_common_dir(
7029 &common_dir,
7030 ))
7031}
7032
7033/// Validates that the resolved worktree directory is acceptable:
7034/// - The setting must not be an absolute path.
7035/// - The resolved path must be either a subdirectory of the working
7036/// directory or a subdirectory of its parent (i.e., a sibling).
7037///
7038/// Returns `Ok(resolved_path)` or an error with a user-facing message.
7039pub fn worktrees_directory_for_repo(
7040 original_repo_abs_path: &Path,
7041 worktree_directory_setting: &str,
7042) -> Result<PathBuf> {
7043 // Check the original setting before trimming, since a path like "///"
7044 // is absolute but becomes "" after stripping trailing separators.
7045 // Also check for leading `/` or `\` explicitly, because on Windows
7046 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
7047 // would slip through even though it's clearly not a relative path.
7048 if Path::new(worktree_directory_setting).is_absolute()
7049 || worktree_directory_setting.starts_with('/')
7050 || worktree_directory_setting.starts_with('\\')
7051 {
7052 anyhow::bail!(
7053 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
7054 );
7055 }
7056
7057 if worktree_directory_setting.is_empty() {
7058 anyhow::bail!("git.worktree_directory must not be empty");
7059 }
7060
7061 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
7062 if trimmed == ".." {
7063 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
7064 }
7065
7066 let joined = original_repo_abs_path.join(trimmed);
7067 let resolved = util::normalize_path(&joined);
7068 let resolved = if resolved.starts_with(original_repo_abs_path) {
7069 resolved
7070 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
7071 resolved.join(repo_dir_name)
7072 } else {
7073 resolved
7074 };
7075
7076 let parent = original_repo_abs_path
7077 .parent()
7078 .unwrap_or(original_repo_abs_path);
7079
7080 if !resolved.starts_with(parent) {
7081 anyhow::bail!(
7082 "git.worktree_directory resolved to {resolved:?}, which is outside \
7083 the project root and its parent directory. It must resolve to a \
7084 subdirectory of {original_repo_abs_path:?} or a sibling of it."
7085 );
7086 }
7087
7088 Ok(resolved)
7089}
7090
7091/// Returns a short name for a linked worktree suitable for UI display
7092///
7093/// Uses the main worktree path to come up with a short name that disambiguates
7094/// the linked worktree from the main worktree.
7095pub fn linked_worktree_short_name(
7096 main_worktree_path: &Path,
7097 linked_worktree_path: &Path,
7098) -> Option<SharedString> {
7099 if main_worktree_path == linked_worktree_path {
7100 return None;
7101 }
7102
7103 let project_name = main_worktree_path.file_name()?.to_str()?;
7104 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7105 let name = if directory_name != project_name {
7106 directory_name.to_string()
7107 } else {
7108 linked_worktree_path
7109 .parent()?
7110 .file_name()?
7111 .to_str()?
7112 .to_string()
7113 };
7114 Some(name.into())
7115}
7116
7117fn get_permalink_in_rust_registry_src(
7118 provider_registry: Arc<GitHostingProviderRegistry>,
7119 path: PathBuf,
7120 selection: Range<u32>,
7121) -> Result<url::Url> {
7122 #[derive(Deserialize)]
7123 struct CargoVcsGit {
7124 sha1: String,
7125 }
7126
7127 #[derive(Deserialize)]
7128 struct CargoVcsInfo {
7129 git: CargoVcsGit,
7130 path_in_vcs: String,
7131 }
7132
7133 #[derive(Deserialize)]
7134 struct CargoPackage {
7135 repository: String,
7136 }
7137
7138 #[derive(Deserialize)]
7139 struct CargoToml {
7140 package: CargoPackage,
7141 }
7142
7143 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
7144 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
7145 Some((dir, json))
7146 }) else {
7147 bail!("No .cargo_vcs_info.json found in parent directories")
7148 };
7149 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
7150 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
7151 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
7152 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
7153 .context("parsing package.repository field of manifest")?;
7154 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
7155 let permalink = provider.build_permalink(
7156 remote,
7157 BuildPermalinkParams::new(
7158 &cargo_vcs_info.git.sha1,
7159 &RepoPath::from_rel_path(
7160 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
7161 ),
7162 Some(selection),
7163 ),
7164 );
7165 Ok(permalink)
7166}
7167
7168fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7169 let Some(blame) = blame else {
7170 return proto::BlameBufferResponse {
7171 blame_response: None,
7172 };
7173 };
7174
7175 let entries = blame
7176 .entries
7177 .into_iter()
7178 .map(|entry| proto::BlameEntry {
7179 sha: entry.sha.as_bytes().into(),
7180 start_line: entry.range.start,
7181 end_line: entry.range.end,
7182 original_line_number: entry.original_line_number,
7183 author: entry.author,
7184 author_mail: entry.author_mail,
7185 author_time: entry.author_time,
7186 author_tz: entry.author_tz,
7187 committer: entry.committer_name,
7188 committer_mail: entry.committer_email,
7189 committer_time: entry.committer_time,
7190 committer_tz: entry.committer_tz,
7191 summary: entry.summary,
7192 previous: entry.previous,
7193 filename: entry.filename,
7194 })
7195 .collect::<Vec<_>>();
7196
7197 let messages = blame
7198 .messages
7199 .into_iter()
7200 .map(|(oid, message)| proto::CommitMessage {
7201 oid: oid.as_bytes().into(),
7202 message,
7203 })
7204 .collect::<Vec<_>>();
7205
7206 proto::BlameBufferResponse {
7207 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7208 }
7209}
7210
7211fn deserialize_blame_buffer_response(
7212 response: proto::BlameBufferResponse,
7213) -> Option<git::blame::Blame> {
7214 let response = response.blame_response?;
7215 let entries = response
7216 .entries
7217 .into_iter()
7218 .filter_map(|entry| {
7219 Some(git::blame::BlameEntry {
7220 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7221 range: entry.start_line..entry.end_line,
7222 original_line_number: entry.original_line_number,
7223 committer_name: entry.committer,
7224 committer_time: entry.committer_time,
7225 committer_tz: entry.committer_tz,
7226 committer_email: entry.committer_mail,
7227 author: entry.author,
7228 author_mail: entry.author_mail,
7229 author_time: entry.author_time,
7230 author_tz: entry.author_tz,
7231 summary: entry.summary,
7232 previous: entry.previous,
7233 filename: entry.filename,
7234 })
7235 })
7236 .collect::<Vec<_>>();
7237
7238 let messages = response
7239 .messages
7240 .into_iter()
7241 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7242 .collect::<HashMap<_, _>>();
7243
7244 Some(Blame { entries, messages })
7245}
7246
7247fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7248 proto::Branch {
7249 is_head: branch.is_head,
7250 ref_name: branch.ref_name.to_string(),
7251 unix_timestamp: branch
7252 .most_recent_commit
7253 .as_ref()
7254 .map(|commit| commit.commit_timestamp as u64),
7255 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7256 ref_name: upstream.ref_name.to_string(),
7257 tracking: upstream
7258 .tracking
7259 .status()
7260 .map(|upstream| proto::UpstreamTracking {
7261 ahead: upstream.ahead as u64,
7262 behind: upstream.behind as u64,
7263 }),
7264 }),
7265 most_recent_commit: branch
7266 .most_recent_commit
7267 .as_ref()
7268 .map(|commit| proto::CommitSummary {
7269 sha: commit.sha.to_string(),
7270 subject: commit.subject.to_string(),
7271 commit_timestamp: commit.commit_timestamp,
7272 author_name: commit.author_name.to_string(),
7273 }),
7274 }
7275}
7276
7277fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7278 proto::Worktree {
7279 path: worktree.path.to_string_lossy().to_string(),
7280 ref_name: worktree
7281 .ref_name
7282 .as_ref()
7283 .map(|s| s.to_string())
7284 .unwrap_or_default(),
7285 sha: worktree.sha.to_string(),
7286 is_main: worktree.is_main,
7287 }
7288}
7289
7290fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7291 git::repository::Worktree {
7292 path: PathBuf::from(proto.path.clone()),
7293 ref_name: Some(SharedString::from(&proto.ref_name)),
7294 sha: proto.sha.clone().into(),
7295 is_main: proto.is_main,
7296 }
7297}
7298
7299fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7300 git::repository::Branch {
7301 is_head: proto.is_head,
7302 ref_name: proto.ref_name.clone().into(),
7303 upstream: proto
7304 .upstream
7305 .as_ref()
7306 .map(|upstream| git::repository::Upstream {
7307 ref_name: upstream.ref_name.to_string().into(),
7308 tracking: upstream
7309 .tracking
7310 .as_ref()
7311 .map(|tracking| {
7312 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7313 ahead: tracking.ahead as u32,
7314 behind: tracking.behind as u32,
7315 })
7316 })
7317 .unwrap_or(git::repository::UpstreamTracking::Gone),
7318 }),
7319 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7320 git::repository::CommitSummary {
7321 sha: commit.sha.to_string().into(),
7322 subject: commit.subject.to_string().into(),
7323 commit_timestamp: commit.commit_timestamp,
7324 author_name: commit.author_name.to_string().into(),
7325 has_parent: true,
7326 }
7327 }),
7328 }
7329}
7330
7331fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7332 proto::GitCommitDetails {
7333 sha: commit.sha.to_string(),
7334 message: commit.message.to_string(),
7335 commit_timestamp: commit.commit_timestamp,
7336 author_email: commit.author_email.to_string(),
7337 author_name: commit.author_name.to_string(),
7338 }
7339}
7340
7341fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7342 CommitDetails {
7343 sha: proto.sha.clone().into(),
7344 message: proto.message.clone().into(),
7345 commit_timestamp: proto.commit_timestamp,
7346 author_email: proto.author_email.clone().into(),
7347 author_name: proto.author_name.clone().into(),
7348 }
7349}
7350
/// Recomputes the full repository snapshot, coordinating on the foreground
/// thread while running the git commands on the background thread.
///
/// Branch, head, remotes, and worktrees are updated first so the UI can react
/// sooner; file statuses, diff stats, stash, and merge/conflict state are
/// computed and applied immediately after, each change emitting its matching
/// `RepositoryEvent`.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // A full scan supersedes any queued incremental refreshes, so drop them.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // A repository with no commits yet has no HEAD; a failing `show` of the
    // head sha is logged and treated as "no head commit".
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Stage 1: branches, head commit, and worktree list, fetched concurrently
    // off the foreground thread.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // "Linked" worktrees exclude the one this repository object represents.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // Publish the stage-1 results immediately so the UI can update before the
    // (slower) status scan below completes.
    let snapshot = this.update(cx, |this, cx| {
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Stage 2: full status scan, diff stats, and stash, again concurrently.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                // Without a HEAD commit there is nothing to diff against, so
                // substitute an empty diff-stat result.
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Join statuses with their diff stats and collect conflicted paths as we go.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Stage 3: refresh merge/conflict state from the conflicted paths found above.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Publish stage-2/3 results and return the final snapshot.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7520
7521fn status_from_proto(
7522 simple_status: i32,
7523 status: Option<proto::GitFileStatus>,
7524) -> anyhow::Result<FileStatus> {
7525 use proto::git_file_status::Variant;
7526
7527 let Some(variant) = status.and_then(|status| status.variant) else {
7528 let code = proto::GitStatus::from_i32(simple_status)
7529 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7530 let result = match code {
7531 proto::GitStatus::Added => TrackedStatus {
7532 worktree_status: StatusCode::Added,
7533 index_status: StatusCode::Unmodified,
7534 }
7535 .into(),
7536 proto::GitStatus::Modified => TrackedStatus {
7537 worktree_status: StatusCode::Modified,
7538 index_status: StatusCode::Unmodified,
7539 }
7540 .into(),
7541 proto::GitStatus::Conflict => UnmergedStatus {
7542 first_head: UnmergedStatusCode::Updated,
7543 second_head: UnmergedStatusCode::Updated,
7544 }
7545 .into(),
7546 proto::GitStatus::Deleted => TrackedStatus {
7547 worktree_status: StatusCode::Deleted,
7548 index_status: StatusCode::Unmodified,
7549 }
7550 .into(),
7551 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7552 };
7553 return Ok(result);
7554 };
7555
7556 let result = match variant {
7557 Variant::Untracked(_) => FileStatus::Untracked,
7558 Variant::Ignored(_) => FileStatus::Ignored,
7559 Variant::Unmerged(unmerged) => {
7560 let [first_head, second_head] =
7561 [unmerged.first_head, unmerged.second_head].map(|head| {
7562 let code = proto::GitStatus::from_i32(head)
7563 .with_context(|| format!("Invalid git status code: {head}"))?;
7564 let result = match code {
7565 proto::GitStatus::Added => UnmergedStatusCode::Added,
7566 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7567 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7568 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7569 };
7570 Ok(result)
7571 });
7572 let [first_head, second_head] = [first_head?, second_head?];
7573 UnmergedStatus {
7574 first_head,
7575 second_head,
7576 }
7577 .into()
7578 }
7579 Variant::Tracked(tracked) => {
7580 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7581 .map(|status| {
7582 let code = proto::GitStatus::from_i32(status)
7583 .with_context(|| format!("Invalid git status code: {status}"))?;
7584 let result = match code {
7585 proto::GitStatus::Modified => StatusCode::Modified,
7586 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7587 proto::GitStatus::Added => StatusCode::Added,
7588 proto::GitStatus::Deleted => StatusCode::Deleted,
7589 proto::GitStatus::Renamed => StatusCode::Renamed,
7590 proto::GitStatus::Copied => StatusCode::Copied,
7591 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7592 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7593 };
7594 Ok(result)
7595 });
7596 let [index_status, worktree_status] = [index_status?, worktree_status?];
7597 TrackedStatus {
7598 index_status,
7599 worktree_status,
7600 }
7601 .into()
7602 }
7603 };
7604 Ok(result)
7605}
7606
7607fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7608 use proto::git_file_status::{Tracked, Unmerged, Variant};
7609
7610 let variant = match status {
7611 FileStatus::Untracked => Variant::Untracked(Default::default()),
7612 FileStatus::Ignored => Variant::Ignored(Default::default()),
7613 FileStatus::Unmerged(UnmergedStatus {
7614 first_head,
7615 second_head,
7616 }) => Variant::Unmerged(Unmerged {
7617 first_head: unmerged_status_to_proto(first_head),
7618 second_head: unmerged_status_to_proto(second_head),
7619 }),
7620 FileStatus::Tracked(TrackedStatus {
7621 index_status,
7622 worktree_status,
7623 }) => Variant::Tracked(Tracked {
7624 index_status: tracked_status_to_proto(index_status),
7625 worktree_status: tracked_status_to_proto(worktree_status),
7626 }),
7627 };
7628 proto::GitFileStatus {
7629 variant: Some(variant),
7630 }
7631}
7632
7633fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7634 match code {
7635 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7636 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7637 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7638 }
7639}
7640
7641fn tracked_status_to_proto(code: StatusCode) -> i32 {
7642 match code {
7643 StatusCode::Added => proto::GitStatus::Added as _,
7644 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7645 StatusCode::Modified => proto::GitStatus::Modified as _,
7646 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7647 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7648 StatusCode::Copied => proto::GitStatus::Copied as _,
7649 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7650 }
7651}