1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for all git state in a project: the set of repositories
/// discovered across worktrees, per-buffer diff/conflict state, and the
/// plumbing for replicating repository snapshots to and from collaborators.
pub struct GitStore {
    // Local vs. remote mode, plus any downstream (shared-to) client.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    // All known repositories, keyed by their stable id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Which worktrees each repository was discovered in.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    // The repository most recently associated with user activity, if any.
    active_repo_id: Option<RepositoryId>,
    // In-flight diff-loading tasks; `Shared` so concurrent requests for the
    // same (buffer, kind) pair join a single load.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Strong diff handles held on behalf of remote peers.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Strong handles to diffs a remote peer has opened, kept alive so the
/// entities are not dropped while the peer still needs updates for them.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Git state tracked per open buffer: weak handles to the buffer's diffs and
/// conflict set, the cached base texts used to recalculate those diffs, and
/// bookkeeping for in-flight recalculation work.
struct BufferGitState {
    // Weak so closing all views of a diff lets it drop.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against arbitrary commits; `None` key means an empty base.
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders resolved when the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Watch channel flipped while a diff recalculation is in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    // Cached base texts for the uncommitted/unstaged/oid diffs.
    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts changed and their new values, so only the
/// affected diffs are recalculated.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to distinct values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}
155
/// Identifies which kind of diff is being loaded for a buffer; used as part
/// of the `loading_diffs` cache key.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer contents vs. the index.
    Unstaged,
    /// Buffer contents vs. HEAD.
    Uncommitted,
    /// Buffer contents vs. a specific commit (`None` means an empty base).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store operates on local repositories or proxies git
/// operations to an upstream host, plus any downstream share state.
enum GitStoreState {
    Local {
        // Source of fresh repository ids for newly-discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        // Present while the project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        // Present while re-sharing the remote project further downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// Messages queued to the background task that serializes repository changes
/// for downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
181
/// Share state for a local store: the downstream client plus the channel and
/// task that forward repository updates to it.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    // Background task draining `updates_tx`; dropped on unshare.
    _task: Task<Result<()>>,
}
188
/// A point-in-time checkpoint across every repository in the store, keyed by
/// each repository's working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// The git status of a single path within a repository, as stored in the
/// repository's status sum-tree.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    /// Added/deleted line counts, when a diff stat was computed for the path.
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
244impl sum_tree::Item for StatusEntry {
245 type Summary = PathSummary<GitSummary>;
246
247 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
248 PathSummary {
249 max_path: self.repo_path.as_ref().clone(),
250 item_summary: self.status.summary(),
251 }
252 }
253}
254
255impl sum_tree::KeyedItem for StatusEntry {
256 type Key = PathKey;
257
258 fn key(&self) -> Self::Key {
259 PathKey(self.repo_path.as_ref().clone())
260 }
261}
262
/// Stable identifier for a repository within a `GitStore`, assigned locally
/// and replicated to collaborators.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
265
/// Details about an in-progress merge: which merge heads conflict on each
/// path, and the prepared merge commit message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}
271
/// Cache state for a single commit's graph data: either still being fetched
/// or fully loaded.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// Immutable snapshot of a repository's observable state, cheap to clone and
/// diff against a previous snapshot when replicating to collaborators.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Per-path git statuses, ordered by path for efficient range queries.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    /// The currently checked-out branch, if HEAD points at one.
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    /// Monotonically increasing counter bumped on each status scan.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Linked git worktrees (`git worktree list`) for this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
299
/// Monotonically increasing identifier for a queued git job.
type JobId = u64;
301
/// Metadata about a running git job, used to surface progress to the UI.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    /// When the job started; lets callers compute elapsed time.
    pub start: Instant,
    pub message: SharedString,
}
307
/// Handle to the background task that resolves detailed commit data for the
/// git graph; requests are submitted over `commit_data_request`.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}
312
/// Lifecycle of the commit-data handler: being started, running, or shut down.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
318
/// Incrementally loaded commit data for the git graph view, for one
/// (log source, log order) combination.
pub struct InitialGitGraphData {
    // Background task streaming commits into `commit_data`.
    fetch_task: Task<()>,
    /// Error message from the fetch, if it failed.
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index into `commit_data` by commit oid, for O(1) lookup.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
325
/// Borrowed view of the currently loaded graph data returned to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    /// True while the background fetch is still streaming commits.
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
331
/// How a newly created git worktree's HEAD should start out: detached, or on
/// a new branch with the given name.
#[derive(Clone, Debug)]
enum CreateWorktreeStartPoint {
    Detached,
    Branched { name: String },
}
337
/// A single git repository tracked by the [`GitStore`]: the latest snapshot
/// of its state plus the machinery for queueing and running git jobs
/// against it.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    // Buffer backing the commit-message editor, created lazily.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Jobs are serialized through this channel and run in order.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Next job id to assign.
    job_id: JobId,
    // Askpass delegates for in-flight credential prompts, keyed by request id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily resolved local/remote backing state, shared across jobs.
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
357
/// Deref to the snapshot so read-only snapshot accessors can be called
/// directly on a `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
365
/// Backing state for a repository on the local filesystem: the git backend,
/// the filesystem handle, and the environment used to run git commands.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    /// Environment variables captured from the working directory's shell.
    pub environment: Arc<HashMap<String, String>>,
}
372
impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`, capturing the working
    /// directory's shell environment (used for later git invocations) and
    /// locating a system `git` binary via that environment's `PATH`, falling
    /// back to a plain `which git` lookup.
    ///
    /// On environment-capture failure we log and proceed with an empty
    /// environment rather than failing the whole open. Errors opening the
    /// repository itself are propagated.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a git found on the captured PATH so repository
                    // commands match what the user's shell would run.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
415
/// Backing state for a repository hosted upstream: git operations are
/// forwarded to `client` under `project_id`.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
421
/// Where a repository's git operations actually execute: directly on the
/// local filesystem, or proxied to a remote host.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
427
/// Progress events for the incremental git-graph load.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// More commits have been loaded; carries the current count.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
434
/// Fine-grained change notifications emitted by a [`Repository`].
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    /// Carries the full new set of pending operations.
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// A git-graph loading event for the given (source, order) combination.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
445
/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
448
/// Store-level events emitted by [`GitStore`].
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// Writing a staged-text update to the git index failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
460
// Repositories emit both fine-grained repository events and job-status
// notifications; the store emits store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
464
/// A unit of git work queued on a repository's serialized job channel.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    /// When present, identifies jobs that supersede earlier queued jobs with
    /// the same key.
    key: Option<GitJobKey>,
}
469
/// Deduplication keys for queued git jobs; see [`GitJob::key`].
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
477
478impl GitStore {
479 pub fn local(
480 worktree_store: &Entity<WorktreeStore>,
481 buffer_store: Entity<BufferStore>,
482 environment: Entity<ProjectEnvironment>,
483 fs: Arc<dyn Fs>,
484 cx: &mut Context<Self>,
485 ) -> Self {
486 Self::new(
487 worktree_store.clone(),
488 buffer_store,
489 GitStoreState::Local {
490 next_repository_id: Arc::new(AtomicU64::new(1)),
491 downstream: None,
492 project_environment: environment,
493 fs,
494 },
495 cx,
496 )
497 }
498
499 pub fn remote(
500 worktree_store: &Entity<WorktreeStore>,
501 buffer_store: Entity<BufferStore>,
502 upstream_client: AnyProtoClient,
503 project_id: u64,
504 cx: &mut Context<Self>,
505 ) -> Self {
506 Self::new(
507 worktree_store.clone(),
508 buffer_store,
509 GitStoreState::Remote {
510 upstream_client,
511 upstream_project_id: project_id,
512 downstream: None,
513 },
514 cx,
515 )
516 }
517
518 fn new(
519 worktree_store: Entity<WorktreeStore>,
520 buffer_store: Entity<BufferStore>,
521 state: GitStoreState,
522 cx: &mut Context<Self>,
523 ) -> Self {
524 let mut _subscriptions = vec![
525 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
526 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
527 ];
528
529 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
530 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
531 }
532
533 GitStore {
534 state,
535 buffer_store,
536 worktree_store,
537 repositories: HashMap::default(),
538 worktree_ids: HashMap::default(),
539 active_repo_id: None,
540 _subscriptions,
541 loading_diffs: HashMap::default(),
542 shared_diffs: HashMap::default(),
543 diffs: HashMap::default(),
544 }
545 }
546
    /// Registers every git-related RPC handler on the given client, so remote
    /// peers can drive git operations on this store. Called once at startup.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
    }
599
600 pub fn is_local(&self) -> bool {
601 matches!(self.state, GitStoreState::Local { .. })
602 }
603 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
604 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
605 let id = repo.read(cx).id;
606 if self.active_repo_id != Some(id) {
607 self.active_repo_id = Some(id);
608 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
609 }
610 }
611 }
612
    /// Starts sharing this store with a downstream collaborator under
    /// `project_id`.
    ///
    /// For a remote store, each repository's full snapshot is forwarded
    /// immediately. For a local store, a background task is additionally
    /// spawned that serializes repository updates, diffing each new snapshot
    /// against the last one sent to keep messages small.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send a full initial update per repository, split into
                // appropriately-sized proto messages.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // Last snapshot sent per repository, so later updates can be
                // sent as diffs instead of full state.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Queue initial updates for all currently-known repositories.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send only the diff.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send everything.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // If the send loop stops (e.g. the client went away),
                        // drop the downstream state so we stop queueing.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
693
694 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
695 match &mut self.state {
696 GitStoreState::Local {
697 downstream: downstream_client,
698 ..
699 } => {
700 downstream_client.take();
701 }
702 GitStoreState::Remote {
703 downstream: downstream_client,
704 ..
705 } => {
706 downstream_client.take();
707 }
708 }
709 self.shared_diffs.clear();
710 }
711
    /// Drops the diffs held open on behalf of the given peer (e.g. when the
    /// peer disconnects).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
715
716 pub fn active_repository(&self) -> Option<Entity<Repository>> {
717 self.active_repo_id
718 .as_ref()
719 .map(|id| self.repositories[id].clone())
720 }
721
    /// Returns (creating if necessary) the diff between the buffer's contents
    /// and its staged (index) text.
    ///
    /// A live cached diff is returned directly, after waiting for any
    /// in-progress recalculation; otherwise the staged text is loaded from
    /// the repository, with concurrent callers joining a single shared task.
    /// Fails if the buffer is not inside a known git repository.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // Wait out any in-flight recalculation so the caller sees an
            // up-to-date diff.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads through a shared task keyed by
        // (buffer, diff kind).
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
776
    /// Returns (creating if necessary) a diff of the buffer against its
    /// content at commit `oid`; a `None` oid diffs against an empty base.
    ///
    /// The new diff's secondary diff is set to the buffer's unstaged diff,
    /// and concurrent requests for the same (buffer, oid) share one loading
    /// task.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: reuse a live diff for this oid, waiting out any
        // in-flight recalculation first.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Join an in-flight load if one exists.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // `None` oid means an empty base text.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff acts as the secondary diff, so staged
                    // vs. unstaged hunks can be distinguished.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        // Record the new diff (and its base text) in the
                        // buffer's git state for reuse.
                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
877
    /// Returns (creating if necessary) the diff between the buffer's contents
    /// and its committed (HEAD) text.
    ///
    /// A live cached diff is returned directly, after waiting for any
    /// in-progress recalculation; otherwise the committed text is loaded from
    /// the repository, with concurrent callers joining a single shared task.
    /// Fails if the buffer is not inside a known git repository.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // Wait out any in-flight recalculation so the caller sees an
            // up-to-date diff.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads through a shared task keyed by
        // (buffer, diff kind).
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
930
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`: records
    /// the loaded base texts in the buffer's git state, creates the
    /// `BufferDiff` entity (wiring the unstaged diff as the uncommitted
    /// diff's secondary), and waits for the first recalculation to finish.
    ///
    /// On a base-text load error, the loading-task cache entry is removed so
    /// a later call can retry. Not used for `DiffKind::SinceOid`.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the cache entry so the failed load can be retried.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // The uncommitted diff's secondary is the unstaged
                        // diff; create a placeholder if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off the recalculation and wait for it so the returned
                // diff is populated.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1005
1006 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1007 let diff_state = self.diffs.get(&buffer_id)?;
1008 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1009 }
1010
1011 pub fn get_uncommitted_diff(
1012 &self,
1013 buffer_id: BufferId,
1014 cx: &App,
1015 ) -> Option<Entity<BufferDiff>> {
1016 let diff_state = self.diffs.get(&buffer_id)?;
1017 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1018 }
1019
1020 pub fn get_diff_since_oid(
1021 &self,
1022 buffer_id: BufferId,
1023 oid: Option<git::Oid>,
1024 cx: &App,
1025 ) -> Option<Entity<BufferDiff>> {
1026 let diff_state = self.diffs.get(&buffer_id)?;
1027 diff_state.read(cx).oid_diff(oid)
1028 }
1029
1030 pub fn open_conflict_set(
1031 &mut self,
1032 buffer: Entity<Buffer>,
1033 cx: &mut Context<Self>,
1034 ) -> Entity<ConflictSet> {
1035 log::debug!("open conflict set");
1036 let buffer_id = buffer.read(cx).remote_id();
1037
1038 if let Some(git_state) = self.diffs.get(&buffer_id)
1039 && let Some(conflict_set) = git_state
1040 .read(cx)
1041 .conflict_set
1042 .as_ref()
1043 .and_then(|weak| weak.upgrade())
1044 {
1045 let conflict_set = conflict_set;
1046 let buffer_snapshot = buffer.read(cx).text_snapshot();
1047
1048 git_state.update(cx, |state, cx| {
1049 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1050 });
1051
1052 return conflict_set;
1053 }
1054
1055 let is_unmerged = self
1056 .repository_and_path_for_buffer_id(buffer_id, cx)
1057 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1058 let git_store = cx.weak_entity();
1059 let buffer_git_state = self
1060 .diffs
1061 .entry(buffer_id)
1062 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1063 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1064
1065 self._subscriptions
1066 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1067 cx.emit(GitStoreEvent::ConflictsUpdated);
1068 }));
1069
1070 buffer_git_state.update(cx, |state, cx| {
1071 state.conflict_set = Some(conflict_set.downgrade());
1072 let buffer_snapshot = buffer.read(cx).text_snapshot();
1073 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1074 });
1075
1076 conflict_set
1077 }
1078
1079 pub fn project_path_git_status(
1080 &self,
1081 project_path: &ProjectPath,
1082 cx: &App,
1083 ) -> Option<FileStatus> {
1084 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1085 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1086 }
1087
    /// Captures a checkpoint of every repository in the store.
    ///
    /// Returns a task that resolves to a `GitStoreCheckpoint` mapping each
    /// repository's working-directory path to its checkpoint. Fails if any
    /// single repository checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` resolves to a nested Result; the closure's
                // `?` flattens it into a single Result per repository.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        // Await all repository checkpoints off the main thread, then zip the
        // results back up with the paths collected above (same order).
        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1108
1109 pub fn restore_checkpoint(
1110 &self,
1111 checkpoint: GitStoreCheckpoint,
1112 cx: &mut App,
1113 ) -> Task<Result<()>> {
1114 let repositories_by_work_dir_abs_path = self
1115 .repositories
1116 .values()
1117 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1118 .collect::<HashMap<_, _>>();
1119
1120 let mut tasks = Vec::new();
1121 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1122 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1123 let restore = repository.update(cx, |repository, _| {
1124 repository.restore_checkpoint(checkpoint)
1125 });
1126 tasks.push(async move { restore.await? });
1127 }
1128 }
1129 cx.background_spawn(async move {
1130 future::try_join_all(tasks).await?;
1131 Ok(())
1132 })
1133 }
1134
1135 /// Compares two checkpoints, returning true if they are equal.
1136 pub fn compare_checkpoints(
1137 &self,
1138 left: GitStoreCheckpoint,
1139 mut right: GitStoreCheckpoint,
1140 cx: &mut App,
1141 ) -> Task<Result<bool>> {
1142 let repositories_by_work_dir_abs_path = self
1143 .repositories
1144 .values()
1145 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1146 .collect::<HashMap<_, _>>();
1147
1148 let mut tasks = Vec::new();
1149 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1150 if let Some(right_checkpoint) = right
1151 .checkpoints_by_work_dir_abs_path
1152 .remove(&work_dir_abs_path)
1153 {
1154 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1155 {
1156 let compare = repository.update(cx, |repository, _| {
1157 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1158 });
1159
1160 tasks.push(async move { compare.await? });
1161 }
1162 } else {
1163 return Task::ready(Ok(false));
1164 }
1165 }
1166 cx.background_spawn(async move {
1167 Ok(future::try_join_all(tasks)
1168 .await?
1169 .into_iter()
1170 .all(|result| result))
1171 })
1172 }
1173
    /// Blames a buffer.
    ///
    /// If `version` is given, the blame is computed against the buffer text
    /// at that version; otherwise against the current text. Fails when the
    /// buffer is not part of any known git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot everything needed from the buffer up front, since the rest
        // of the work happens asynchronously.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle across the await so the repository entity
        // isn't kept alive by this task.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                // Local repository: run blame directly against the backend.
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                // Remote repository: ask the host to compute the blame for
                // the serialized buffer version.
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1220
1221 pub fn file_history(
1222 &self,
1223 repo: &Entity<Repository>,
1224 path: RepoPath,
1225 cx: &mut App,
1226 ) -> Task<Result<git::repository::FileHistory>> {
1227 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1228
1229 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1230 }
1231
1232 pub fn file_history_paginated(
1233 &self,
1234 repo: &Entity<Repository>,
1235 path: RepoPath,
1236 skip: usize,
1237 limit: Option<usize>,
1238 cx: &mut App,
1239 ) -> Task<Result<git::repository::FileHistory>> {
1240 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1241
1242 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1243 }
1244
    /// Builds a permalink URL to the given line range of `buffer` on its git
    /// hosting provider, pinned to the repository's current HEAD SHA.
    ///
    /// Falls back to building a permalink from Cargo registry metadata when
    /// the buffer is a Rust file outside any git repository.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the remote that the current branch's upstream points at,
        // falling back to "origin" when there is no upstream.
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Local repository: resolve the remote URL and HEAD SHA,
                    // then let the matching hosting provider build the URL.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Remote repository: the host computes the permalink and
                    // returns it as a string to parse.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1329
1330 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1331 match &self.state {
1332 GitStoreState::Local {
1333 downstream: downstream_client,
1334 ..
1335 } => downstream_client
1336 .as_ref()
1337 .map(|state| (state.client.clone(), state.project_id)),
1338 GitStoreState::Remote {
1339 downstream: downstream_client,
1340 ..
1341 } => downstream_client.clone(),
1342 }
1343 }
1344
1345 fn upstream_client(&self) -> Option<AnyProtoClient> {
1346 match &self.state {
1347 GitStoreState::Local { .. } => None,
1348 GitStoreState::Remote {
1349 upstream_client, ..
1350 } => Some(upstream_client.clone()),
1351 }
1352 }
1353
    /// Routes worktree-store events to repository bookkeeping.
    ///
    /// Only local stores react here (remote stores return early); this keeps
    /// the repository set, per-repository worktree associations, and the
    /// active repository selection in sync with worktree changes.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            // Entries changed on disk: determine which repositories the
            // changed paths belong to and notify each of them.
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            // The set of git repositories inside a worktree changed:
            // add/update/remove repositories and refresh buffer diff bases.
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            // A worktree was removed: drop repositories that are no longer
            // associated with any worktree.
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                // NOTE(review): unlike `handle_remove_repository`, no
                // `GitStoreEvent::RepositoryRemoved` is emitted for these
                // removals — confirm local listeners don't need it.
                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository went away, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to a repository's state changing: refreshes conflict tracking
    /// for buffers belonging to that repository, then re-emits the event at
    /// the store level as `RepositoryUpdated`.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Sync the conflict set's "unmerged" flag with the
                        // latest repository snapshot.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse conflict markers when the flag
                        // actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1499
    /// Re-emits a repository's `JobsUpdated` notification as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1503
    /// Updates our list of repositories and schedules git scans in response
    /// to a notification from a worktree.
    ///
    /// Existing repositories are re-pointed and rescanned (or detached from
    /// the worktree); new repositories are created, subscribed to, and shared
    /// downstream; repositories left with no worktree are removed.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update against an existing repository by either its
            // old or its new working-directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still exists (possibly moved): keep it
                    // associated with this worktree and rescan it.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Repository disappeared from this worktree; remove it
                    // entirely once no worktree references it anymore.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A repository we haven't seen before: create and register it.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Determine whether the containing worktree is trusted; the
                // flag is handed to the repository's git backend.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository to appear becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        // Finalize removals: clear the active selection if needed and tell
        // downstream clients to drop the repository too.
        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1616
1617 fn on_trusted_worktrees_event(
1618 &mut self,
1619 _: Entity<TrustedWorktreesStore>,
1620 event: &TrustedWorktreesEvent,
1621 cx: &mut Context<Self>,
1622 ) {
1623 if !matches!(self.state, GitStoreState::Local { .. }) {
1624 return;
1625 }
1626
1627 let (is_trusted, event_paths) = match event {
1628 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1629 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1630 };
1631
1632 for (repo_id, worktree_ids) in &self.worktree_ids {
1633 if worktree_ids
1634 .iter()
1635 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1636 {
1637 if let Some(repo) = self.repositories.get(repo_id) {
1638 let repository_state = repo.read(cx).repository_state.clone();
1639 cx.background_spawn(async move {
1640 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1641 state.backend.set_trusted(is_trusted);
1642 }
1643 })
1644 .detach();
1645 }
1646 }
1647 }
1648 }
1649
    /// Keeps per-buffer git state in sync with buffer lifecycle events from
    /// the buffer store.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            // New buffer: watch for language changes so diff state can
            // re-derive language-dependent data.
            BufferStoreEvent::BufferAdded(buffer) => {
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            // A peer closed a shared buffer: drop the diff shared with them.
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            // Buffer is gone entirely: drop all associated diff state.
            BufferStoreEvent::BufferDropped(buffer_id) => {
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Best-effort: failures are logged, not surfaced.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1722
    /// Recalculates diffs and reparses conflict markers for the given
    /// buffers, returning a future that resolves once all of that work
    /// completes.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` may return None when there is
                    // nothing to wait on.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        // Await everything; results are discarded, completion is the signal.
        async move {
            futures::future::join_all(futures).await;
        }
    }
1748
    /// Responds to hunks being staged or unstaged in a buffer diff by writing
    /// the new index text to the repository. On write failure, clears the
    /// pending hunks and emits an `IndexWriteError` event.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Incremented per staging operation and handed to the
                // index-write job along with the new text.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Only an inner Err means the write itself failed;
                        // roll back the optimistic pending-hunk state and
                        // surface the error.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1788
1789 fn local_worktree_git_repos_changed(
1790 &mut self,
1791 worktree: Entity<Worktree>,
1792 changed_repos: &UpdatedGitRepositoriesSet,
1793 cx: &mut Context<Self>,
1794 ) {
1795 log::debug!("local worktree repos changed");
1796 debug_assert!(worktree.read(cx).is_local());
1797
1798 for repository in self.repositories.values() {
1799 repository.update(cx, |repository, cx| {
1800 let repo_abs_path = &repository.work_directory_abs_path;
1801 if changed_repos.iter().any(|update| {
1802 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1803 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1804 }) {
1805 repository.reload_buffer_diff_bases(cx);
1806 }
1807 });
1808 }
1809 }
1810
    /// All repositories known to this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1814
1815 /// Returns the original (main) repository working directory for the given worktree.
1816 /// For normal checkouts this equals the worktree's own path; for linked
1817 /// worktrees it points back to the original repo.
1818 pub fn original_repo_path_for_worktree(
1819 &self,
1820 worktree_id: WorktreeId,
1821 cx: &App,
1822 ) -> Option<Arc<Path>> {
1823 self.active_repo_id
1824 .iter()
1825 .chain(self.worktree_ids.keys())
1826 .find(|repo_id| {
1827 self.worktree_ids
1828 .get(repo_id)
1829 .is_some_and(|ids| ids.contains(&worktree_id))
1830 })
1831 .and_then(|repo_id| self.repositories.get(repo_id))
1832 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1833 }
1834
1835 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1836 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1837 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1838 Some(status.status)
1839 }
1840
1841 pub fn repository_and_path_for_buffer_id(
1842 &self,
1843 buffer_id: BufferId,
1844 cx: &App,
1845 ) -> Option<(Entity<Repository>, RepoPath)> {
1846 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1847 let project_path = buffer.read(cx).project_path(cx)?;
1848 self.repository_and_path_for_project_path(&project_path, cx)
1849 }
1850
1851 pub fn repository_and_path_for_project_path(
1852 &self,
1853 path: &ProjectPath,
1854 cx: &App,
1855 ) -> Option<(Entity<Repository>, RepoPath)> {
1856 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1857 self.repositories
1858 .values()
1859 .filter_map(|repo| {
1860 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1861 Some((repo.clone(), repo_path))
1862 })
1863 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1864 }
1865
1866 pub fn git_init(
1867 &self,
1868 path: Arc<Path>,
1869 fallback_branch_name: String,
1870 cx: &App,
1871 ) -> Task<Result<()>> {
1872 match &self.state {
1873 GitStoreState::Local { fs, .. } => {
1874 let fs = fs.clone();
1875 cx.background_executor()
1876 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1877 }
1878 GitStoreState::Remote {
1879 upstream_client,
1880 upstream_project_id: project_id,
1881 ..
1882 } => {
1883 let client = upstream_client.clone();
1884 let project_id = *project_id;
1885 cx.background_executor().spawn(async move {
1886 client
1887 .request(proto::GitInit {
1888 project_id: project_id,
1889 abs_path: path.to_string_lossy().into_owned(),
1890 fallback_branch_name,
1891 })
1892 .await?;
1893 Ok(())
1894 })
1895 }
1896 }
1897 }
1898
1899 pub fn git_clone(
1900 &self,
1901 repo: String,
1902 path: impl Into<Arc<std::path::Path>>,
1903 cx: &App,
1904 ) -> Task<Result<()>> {
1905 let path = path.into();
1906 match &self.state {
1907 GitStoreState::Local { fs, .. } => {
1908 let fs = fs.clone();
1909 cx.background_executor()
1910 .spawn(async move { fs.git_clone(&repo, &path).await })
1911 }
1912 GitStoreState::Remote {
1913 upstream_client,
1914 upstream_project_id,
1915 ..
1916 } => {
1917 if upstream_client.is_via_collab() {
1918 return Task::ready(Err(anyhow!(
1919 "Git Clone isn't supported for project guests"
1920 )));
1921 }
1922 let request = upstream_client.request(proto::GitClone {
1923 project_id: *upstream_project_id,
1924 abs_path: path.to_string_lossy().into_owned(),
1925 remote_repo: repo,
1926 });
1927
1928 cx.background_spawn(async move {
1929 let result = request.await?;
1930
1931 match result.success {
1932 true => Ok(()),
1933 false => Err(anyhow!("Git Clone failed")),
1934 }
1935 })
1936 }
1937 }
1938 }
1939
    /// Applies a host-sent `UpdateRepository` message: creates the repository
    /// entity on first sight, applies the snapshot update, selects an active
    /// repository if none is set, and re-forwards the update downstream.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // `this._subscriptions` can't be pushed to while
            // `this.repositories` is mutably borrowed by `entry`, so stash
            // the subscription and extend afterwards.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // First repository becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // If this project is itself re-shared, forward the update to
            // downstream guests under our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1995
    /// Handles a host-sent notification that a repository was removed:
    /// drops it locally, forwards the removal to downstream clients, and
    /// clears the active repository selection if it was the one removed.
    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        // NOTE(review): unlike `handle_update_repository`, the result of this
        // `update` call is discarded rather than propagated — confirm that is
        // intentional.
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            // Forward the removal downstream under our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        });
        Ok(())
    }
2017
2018 async fn handle_git_init(
2019 this: Entity<Self>,
2020 envelope: TypedEnvelope<proto::GitInit>,
2021 cx: AsyncApp,
2022 ) -> Result<proto::Ack> {
2023 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2024 let name = envelope.payload.fallback_branch_name;
2025 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2026 .await?;
2027
2028 Ok(proto::Ack {})
2029 }
2030
2031 async fn handle_git_clone(
2032 this: Entity<Self>,
2033 envelope: TypedEnvelope<proto::GitClone>,
2034 cx: AsyncApp,
2035 ) -> Result<proto::GitCloneResponse> {
2036 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2037 let repo_name = envelope.payload.remote_repo;
2038 let result = cx
2039 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2040 .await;
2041
2042 Ok(proto::GitCloneResponse {
2043 success: result.is_ok(),
2044 })
2045 }
2046
2047 async fn handle_fetch(
2048 this: Entity<Self>,
2049 envelope: TypedEnvelope<proto::Fetch>,
2050 mut cx: AsyncApp,
2051 ) -> Result<proto::RemoteMessageResponse> {
2052 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2053 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2054 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2055 let askpass_id = envelope.payload.askpass_id;
2056
2057 let askpass = make_remote_delegate(
2058 this,
2059 envelope.payload.project_id,
2060 repository_id,
2061 askpass_id,
2062 &mut cx,
2063 );
2064
2065 let remote_output = repository_handle
2066 .update(&mut cx, |repository_handle, cx| {
2067 repository_handle.fetch(fetch_options, askpass, cx)
2068 })
2069 .await??;
2070
2071 Ok(proto::RemoteMessageResponse {
2072 stdout: remote_output.stdout,
2073 stderr: remote_output.stderr,
2074 })
2075 }
2076
2077 async fn handle_push(
2078 this: Entity<Self>,
2079 envelope: TypedEnvelope<proto::Push>,
2080 mut cx: AsyncApp,
2081 ) -> Result<proto::RemoteMessageResponse> {
2082 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2083 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2084
2085 let askpass_id = envelope.payload.askpass_id;
2086 let askpass = make_remote_delegate(
2087 this,
2088 envelope.payload.project_id,
2089 repository_id,
2090 askpass_id,
2091 &mut cx,
2092 );
2093
2094 let options = envelope
2095 .payload
2096 .options
2097 .as_ref()
2098 .map(|_| match envelope.payload.options() {
2099 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
2100 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
2101 });
2102
2103 let branch_name = envelope.payload.branch_name.into();
2104 let remote_branch_name = envelope.payload.remote_branch_name.into();
2105 let remote_name = envelope.payload.remote_name.into();
2106
2107 let remote_output = repository_handle
2108 .update(&mut cx, |repository_handle, cx| {
2109 repository_handle.push(
2110 branch_name,
2111 remote_branch_name,
2112 remote_name,
2113 options,
2114 askpass,
2115 cx,
2116 )
2117 })
2118 .await??;
2119 Ok(proto::RemoteMessageResponse {
2120 stdout: remote_output.stdout,
2121 stderr: remote_output.stderr,
2122 })
2123 }
2124
2125 async fn handle_pull(
2126 this: Entity<Self>,
2127 envelope: TypedEnvelope<proto::Pull>,
2128 mut cx: AsyncApp,
2129 ) -> Result<proto::RemoteMessageResponse> {
2130 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2131 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2132 let askpass_id = envelope.payload.askpass_id;
2133 let askpass = make_remote_delegate(
2134 this,
2135 envelope.payload.project_id,
2136 repository_id,
2137 askpass_id,
2138 &mut cx,
2139 );
2140
2141 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2142 let remote_name = envelope.payload.remote_name.into();
2143 let rebase = envelope.payload.rebase;
2144
2145 let remote_message = repository_handle
2146 .update(&mut cx, |repository_handle, cx| {
2147 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2148 })
2149 .await??;
2150
2151 Ok(proto::RemoteMessageResponse {
2152 stdout: remote_message.stdout,
2153 stderr: remote_message.stderr,
2154 })
2155 }
2156
2157 async fn handle_stage(
2158 this: Entity<Self>,
2159 envelope: TypedEnvelope<proto::Stage>,
2160 mut cx: AsyncApp,
2161 ) -> Result<proto::Ack> {
2162 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2163 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2164
2165 let entries = envelope
2166 .payload
2167 .paths
2168 .into_iter()
2169 .map(|path| RepoPath::new(&path))
2170 .collect::<Result<Vec<_>>>()?;
2171
2172 repository_handle
2173 .update(&mut cx, |repository_handle, cx| {
2174 repository_handle.stage_entries(entries, cx)
2175 })
2176 .await?;
2177 Ok(proto::Ack {})
2178 }
2179
2180 async fn handle_unstage(
2181 this: Entity<Self>,
2182 envelope: TypedEnvelope<proto::Unstage>,
2183 mut cx: AsyncApp,
2184 ) -> Result<proto::Ack> {
2185 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2186 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2187
2188 let entries = envelope
2189 .payload
2190 .paths
2191 .into_iter()
2192 .map(|path| RepoPath::new(&path))
2193 .collect::<Result<Vec<_>>>()?;
2194
2195 repository_handle
2196 .update(&mut cx, |repository_handle, cx| {
2197 repository_handle.unstage_entries(entries, cx)
2198 })
2199 .await?;
2200
2201 Ok(proto::Ack {})
2202 }
2203
2204 async fn handle_stash(
2205 this: Entity<Self>,
2206 envelope: TypedEnvelope<proto::Stash>,
2207 mut cx: AsyncApp,
2208 ) -> Result<proto::Ack> {
2209 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2210 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2211
2212 let entries = envelope
2213 .payload
2214 .paths
2215 .into_iter()
2216 .map(|path| RepoPath::new(&path))
2217 .collect::<Result<Vec<_>>>()?;
2218
2219 repository_handle
2220 .update(&mut cx, |repository_handle, cx| {
2221 repository_handle.stash_entries(entries, cx)
2222 })
2223 .await?;
2224
2225 Ok(proto::Ack {})
2226 }
2227
2228 async fn handle_stash_pop(
2229 this: Entity<Self>,
2230 envelope: TypedEnvelope<proto::StashPop>,
2231 mut cx: AsyncApp,
2232 ) -> Result<proto::Ack> {
2233 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2234 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2235 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2236
2237 repository_handle
2238 .update(&mut cx, |repository_handle, cx| {
2239 repository_handle.stash_pop(stash_index, cx)
2240 })
2241 .await?;
2242
2243 Ok(proto::Ack {})
2244 }
2245
2246 async fn handle_stash_apply(
2247 this: Entity<Self>,
2248 envelope: TypedEnvelope<proto::StashApply>,
2249 mut cx: AsyncApp,
2250 ) -> Result<proto::Ack> {
2251 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2252 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2253 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2254
2255 repository_handle
2256 .update(&mut cx, |repository_handle, cx| {
2257 repository_handle.stash_apply(stash_index, cx)
2258 })
2259 .await?;
2260
2261 Ok(proto::Ack {})
2262 }
2263
2264 async fn handle_stash_drop(
2265 this: Entity<Self>,
2266 envelope: TypedEnvelope<proto::StashDrop>,
2267 mut cx: AsyncApp,
2268 ) -> Result<proto::Ack> {
2269 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2270 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2271 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2272
2273 repository_handle
2274 .update(&mut cx, |repository_handle, cx| {
2275 repository_handle.stash_drop(stash_index, cx)
2276 })
2277 .await??;
2278
2279 Ok(proto::Ack {})
2280 }
2281
2282 async fn handle_set_index_text(
2283 this: Entity<Self>,
2284 envelope: TypedEnvelope<proto::SetIndexText>,
2285 mut cx: AsyncApp,
2286 ) -> Result<proto::Ack> {
2287 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2288 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2289 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2290
2291 repository_handle
2292 .update(&mut cx, |repository_handle, cx| {
2293 repository_handle.spawn_set_index_text_job(
2294 repo_path,
2295 envelope.payload.text,
2296 None,
2297 cx,
2298 )
2299 })
2300 .await??;
2301 Ok(proto::Ack {})
2302 }
2303
2304 async fn handle_run_hook(
2305 this: Entity<Self>,
2306 envelope: TypedEnvelope<proto::RunGitHook>,
2307 mut cx: AsyncApp,
2308 ) -> Result<proto::Ack> {
2309 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2310 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2311 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2312 repository_handle
2313 .update(&mut cx, |repository_handle, cx| {
2314 repository_handle.run_hook(hook, cx)
2315 })
2316 .await??;
2317 Ok(proto::Ack {})
2318 }
2319
2320 async fn handle_commit(
2321 this: Entity<Self>,
2322 envelope: TypedEnvelope<proto::Commit>,
2323 mut cx: AsyncApp,
2324 ) -> Result<proto::Ack> {
2325 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2326 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2327 let askpass_id = envelope.payload.askpass_id;
2328
2329 let askpass = make_remote_delegate(
2330 this,
2331 envelope.payload.project_id,
2332 repository_id,
2333 askpass_id,
2334 &mut cx,
2335 );
2336
2337 let message = SharedString::from(envelope.payload.message);
2338 let name = envelope.payload.name.map(SharedString::from);
2339 let email = envelope.payload.email.map(SharedString::from);
2340 let options = envelope.payload.options.unwrap_or_default();
2341
2342 repository_handle
2343 .update(&mut cx, |repository_handle, cx| {
2344 repository_handle.commit(
2345 message,
2346 name.zip(email),
2347 CommitOptions {
2348 amend: options.amend,
2349 signoff: options.signoff,
2350 allow_empty: options.allow_empty,
2351 },
2352 askpass,
2353 cx,
2354 )
2355 })
2356 .await??;
2357 Ok(proto::Ack {})
2358 }
2359
2360 async fn handle_get_remotes(
2361 this: Entity<Self>,
2362 envelope: TypedEnvelope<proto::GetRemotes>,
2363 mut cx: AsyncApp,
2364 ) -> Result<proto::GetRemotesResponse> {
2365 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2366 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2367
2368 let branch_name = envelope.payload.branch_name;
2369 let is_push = envelope.payload.is_push;
2370
2371 let remotes = repository_handle
2372 .update(&mut cx, |repository_handle, _| {
2373 repository_handle.get_remotes(branch_name, is_push)
2374 })
2375 .await??;
2376
2377 Ok(proto::GetRemotesResponse {
2378 remotes: remotes
2379 .into_iter()
2380 .map(|remotes| proto::get_remotes_response::Remote {
2381 name: remotes.name.to_string(),
2382 })
2383 .collect::<Vec<_>>(),
2384 })
2385 }
2386
2387 async fn handle_get_worktrees(
2388 this: Entity<Self>,
2389 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2390 mut cx: AsyncApp,
2391 ) -> Result<proto::GitWorktreesResponse> {
2392 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2393 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2394
2395 let worktrees = repository_handle
2396 .update(&mut cx, |repository_handle, _| {
2397 repository_handle.worktrees()
2398 })
2399 .await??;
2400
2401 Ok(proto::GitWorktreesResponse {
2402 worktrees: worktrees
2403 .into_iter()
2404 .map(|worktree| worktree_to_proto(&worktree))
2405 .collect::<Vec<_>>(),
2406 })
2407 }
2408
2409 async fn handle_create_worktree(
2410 this: Entity<Self>,
2411 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2412 mut cx: AsyncApp,
2413 ) -> Result<proto::Ack> {
2414 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2415 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2416 let directory = PathBuf::from(envelope.payload.directory);
2417 let start_point = match envelope.payload.name {
2418 Some(name) => CreateWorktreeStartPoint::Branched { name },
2419 None => CreateWorktreeStartPoint::Detached,
2420 };
2421 let commit = envelope.payload.commit;
2422
2423 repository_handle
2424 .update(&mut cx, |repository_handle, _| {
2425 repository_handle.create_worktree_with_start_point(start_point, directory, commit)
2426 })
2427 .await??;
2428
2429 Ok(proto::Ack {})
2430 }
2431
2432 async fn handle_remove_worktree(
2433 this: Entity<Self>,
2434 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2435 mut cx: AsyncApp,
2436 ) -> Result<proto::Ack> {
2437 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2438 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2439 let path = PathBuf::from(envelope.payload.path);
2440 let force = envelope.payload.force;
2441
2442 repository_handle
2443 .update(&mut cx, |repository_handle, _| {
2444 repository_handle.remove_worktree(path, force)
2445 })
2446 .await??;
2447
2448 Ok(proto::Ack {})
2449 }
2450
2451 async fn handle_rename_worktree(
2452 this: Entity<Self>,
2453 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2454 mut cx: AsyncApp,
2455 ) -> Result<proto::Ack> {
2456 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2457 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2458 let old_path = PathBuf::from(envelope.payload.old_path);
2459 let new_path = PathBuf::from(envelope.payload.new_path);
2460
2461 repository_handle
2462 .update(&mut cx, |repository_handle, _| {
2463 repository_handle.rename_worktree(old_path, new_path)
2464 })
2465 .await??;
2466
2467 Ok(proto::Ack {})
2468 }
2469
2470 async fn handle_get_head_sha(
2471 this: Entity<Self>,
2472 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2473 mut cx: AsyncApp,
2474 ) -> Result<proto::GitGetHeadShaResponse> {
2475 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2476 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2477
2478 let head_sha = repository_handle
2479 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2480 .await??;
2481
2482 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2483 }
2484
2485 async fn handle_get_branches(
2486 this: Entity<Self>,
2487 envelope: TypedEnvelope<proto::GitGetBranches>,
2488 mut cx: AsyncApp,
2489 ) -> Result<proto::GitBranchesResponse> {
2490 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2491 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2492
2493 let branches = repository_handle
2494 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2495 .await??;
2496
2497 Ok(proto::GitBranchesResponse {
2498 branches: branches
2499 .into_iter()
2500 .map(|branch| branch_to_proto(&branch))
2501 .collect::<Vec<_>>(),
2502 })
2503 }
2504 async fn handle_get_default_branch(
2505 this: Entity<Self>,
2506 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2507 mut cx: AsyncApp,
2508 ) -> Result<proto::GetDefaultBranchResponse> {
2509 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2510 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2511
2512 let branch = repository_handle
2513 .update(&mut cx, |repository_handle, _| {
2514 repository_handle.default_branch(false)
2515 })
2516 .await??
2517 .map(Into::into);
2518
2519 Ok(proto::GetDefaultBranchResponse { branch })
2520 }
2521 async fn handle_create_branch(
2522 this: Entity<Self>,
2523 envelope: TypedEnvelope<proto::GitCreateBranch>,
2524 mut cx: AsyncApp,
2525 ) -> Result<proto::Ack> {
2526 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2527 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2528 let branch_name = envelope.payload.branch_name;
2529
2530 repository_handle
2531 .update(&mut cx, |repository_handle, _| {
2532 repository_handle.create_branch(branch_name, None)
2533 })
2534 .await??;
2535
2536 Ok(proto::Ack {})
2537 }
2538
2539 async fn handle_change_branch(
2540 this: Entity<Self>,
2541 envelope: TypedEnvelope<proto::GitChangeBranch>,
2542 mut cx: AsyncApp,
2543 ) -> Result<proto::Ack> {
2544 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2545 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2546 let branch_name = envelope.payload.branch_name;
2547
2548 repository_handle
2549 .update(&mut cx, |repository_handle, _| {
2550 repository_handle.change_branch(branch_name)
2551 })
2552 .await??;
2553
2554 Ok(proto::Ack {})
2555 }
2556
2557 async fn handle_rename_branch(
2558 this: Entity<Self>,
2559 envelope: TypedEnvelope<proto::GitRenameBranch>,
2560 mut cx: AsyncApp,
2561 ) -> Result<proto::Ack> {
2562 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2563 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2564 let branch = envelope.payload.branch;
2565 let new_name = envelope.payload.new_name;
2566
2567 repository_handle
2568 .update(&mut cx, |repository_handle, _| {
2569 repository_handle.rename_branch(branch, new_name)
2570 })
2571 .await??;
2572
2573 Ok(proto::Ack {})
2574 }
2575
2576 async fn handle_create_remote(
2577 this: Entity<Self>,
2578 envelope: TypedEnvelope<proto::GitCreateRemote>,
2579 mut cx: AsyncApp,
2580 ) -> Result<proto::Ack> {
2581 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2582 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2583 let remote_name = envelope.payload.remote_name;
2584 let remote_url = envelope.payload.remote_url;
2585
2586 repository_handle
2587 .update(&mut cx, |repository_handle, _| {
2588 repository_handle.create_remote(remote_name, remote_url)
2589 })
2590 .await??;
2591
2592 Ok(proto::Ack {})
2593 }
2594
2595 async fn handle_delete_branch(
2596 this: Entity<Self>,
2597 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2598 mut cx: AsyncApp,
2599 ) -> Result<proto::Ack> {
2600 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2601 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2602 let is_remote = envelope.payload.is_remote;
2603 let branch_name = envelope.payload.branch_name;
2604
2605 repository_handle
2606 .update(&mut cx, |repository_handle, _| {
2607 repository_handle.delete_branch(is_remote, branch_name)
2608 })
2609 .await??;
2610
2611 Ok(proto::Ack {})
2612 }
2613
2614 async fn handle_remove_remote(
2615 this: Entity<Self>,
2616 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2617 mut cx: AsyncApp,
2618 ) -> Result<proto::Ack> {
2619 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2620 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2621 let remote_name = envelope.payload.remote_name;
2622
2623 repository_handle
2624 .update(&mut cx, |repository_handle, _| {
2625 repository_handle.remove_remote(remote_name)
2626 })
2627 .await??;
2628
2629 Ok(proto::Ack {})
2630 }
2631
2632 async fn handle_show(
2633 this: Entity<Self>,
2634 envelope: TypedEnvelope<proto::GitShow>,
2635 mut cx: AsyncApp,
2636 ) -> Result<proto::GitCommitDetails> {
2637 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2638 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2639
2640 let commit = repository_handle
2641 .update(&mut cx, |repository_handle, _| {
2642 repository_handle.show(envelope.payload.commit)
2643 })
2644 .await??;
2645 Ok(proto::GitCommitDetails {
2646 sha: commit.sha.into(),
2647 message: commit.message.into(),
2648 commit_timestamp: commit.commit_timestamp,
2649 author_email: commit.author_email.into(),
2650 author_name: commit.author_name.into(),
2651 })
2652 }
2653
2654 async fn handle_create_checkpoint(
2655 this: Entity<Self>,
2656 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2657 mut cx: AsyncApp,
2658 ) -> Result<proto::GitCreateCheckpointResponse> {
2659 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2660 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2661
2662 let checkpoint = repository_handle
2663 .update(&mut cx, |repository, _| repository.checkpoint())
2664 .await??;
2665
2666 Ok(proto::GitCreateCheckpointResponse {
2667 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2668 })
2669 }
2670
2671 async fn handle_restore_checkpoint(
2672 this: Entity<Self>,
2673 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2674 mut cx: AsyncApp,
2675 ) -> Result<proto::Ack> {
2676 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2677 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2678
2679 let checkpoint = GitRepositoryCheckpoint {
2680 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2681 };
2682
2683 repository_handle
2684 .update(&mut cx, |repository, _| {
2685 repository.restore_checkpoint(checkpoint)
2686 })
2687 .await??;
2688
2689 Ok(proto::Ack {})
2690 }
2691
2692 async fn handle_compare_checkpoints(
2693 this: Entity<Self>,
2694 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2695 mut cx: AsyncApp,
2696 ) -> Result<proto::GitCompareCheckpointsResponse> {
2697 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2698 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2699
2700 let left = GitRepositoryCheckpoint {
2701 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2702 };
2703 let right = GitRepositoryCheckpoint {
2704 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2705 };
2706
2707 let equal = repository_handle
2708 .update(&mut cx, |repository, _| {
2709 repository.compare_checkpoints(left, right)
2710 })
2711 .await??;
2712
2713 Ok(proto::GitCompareCheckpointsResponse { equal })
2714 }
2715
2716 async fn handle_diff_checkpoints(
2717 this: Entity<Self>,
2718 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2719 mut cx: AsyncApp,
2720 ) -> Result<proto::GitDiffCheckpointsResponse> {
2721 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2722 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2723
2724 let base = GitRepositoryCheckpoint {
2725 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2726 };
2727 let target = GitRepositoryCheckpoint {
2728 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2729 };
2730
2731 let diff = repository_handle
2732 .update(&mut cx, |repository, _| {
2733 repository.diff_checkpoints(base, target)
2734 })
2735 .await??;
2736
2737 Ok(proto::GitDiffCheckpointsResponse { diff })
2738 }
2739
2740 async fn handle_load_commit_diff(
2741 this: Entity<Self>,
2742 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2743 mut cx: AsyncApp,
2744 ) -> Result<proto::LoadCommitDiffResponse> {
2745 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2746 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2747
2748 let commit_diff = repository_handle
2749 .update(&mut cx, |repository_handle, _| {
2750 repository_handle.load_commit_diff(envelope.payload.commit)
2751 })
2752 .await??;
2753 Ok(proto::LoadCommitDiffResponse {
2754 files: commit_diff
2755 .files
2756 .into_iter()
2757 .map(|file| proto::CommitFile {
2758 path: file.path.to_proto(),
2759 old_text: file.old_text,
2760 new_text: file.new_text,
2761 is_binary: file.is_binary,
2762 })
2763 .collect(),
2764 })
2765 }
2766
2767 async fn handle_file_history(
2768 this: Entity<Self>,
2769 envelope: TypedEnvelope<proto::GitFileHistory>,
2770 mut cx: AsyncApp,
2771 ) -> Result<proto::GitFileHistoryResponse> {
2772 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2773 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2774 let path = RepoPath::from_proto(&envelope.payload.path)?;
2775 let skip = envelope.payload.skip as usize;
2776 let limit = envelope.payload.limit.map(|l| l as usize);
2777
2778 let file_history = repository_handle
2779 .update(&mut cx, |repository_handle, _| {
2780 repository_handle.file_history_paginated(path, skip, limit)
2781 })
2782 .await??;
2783
2784 Ok(proto::GitFileHistoryResponse {
2785 entries: file_history
2786 .entries
2787 .into_iter()
2788 .map(|entry| proto::FileHistoryEntry {
2789 sha: entry.sha.to_string(),
2790 subject: entry.subject.to_string(),
2791 message: entry.message.to_string(),
2792 commit_timestamp: entry.commit_timestamp,
2793 author_name: entry.author_name.to_string(),
2794 author_email: entry.author_email.to_string(),
2795 })
2796 .collect(),
2797 path: file_history.path.to_proto(),
2798 })
2799 }
2800
2801 async fn handle_reset(
2802 this: Entity<Self>,
2803 envelope: TypedEnvelope<proto::GitReset>,
2804 mut cx: AsyncApp,
2805 ) -> Result<proto::Ack> {
2806 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2807 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2808
2809 let mode = match envelope.payload.mode() {
2810 git_reset::ResetMode::Soft => ResetMode::Soft,
2811 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2812 };
2813
2814 repository_handle
2815 .update(&mut cx, |repository_handle, cx| {
2816 repository_handle.reset(envelope.payload.commit, mode, cx)
2817 })
2818 .await??;
2819 Ok(proto::Ack {})
2820 }
2821
2822 async fn handle_checkout_files(
2823 this: Entity<Self>,
2824 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2825 mut cx: AsyncApp,
2826 ) -> Result<proto::Ack> {
2827 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2828 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2829 let paths = envelope
2830 .payload
2831 .paths
2832 .iter()
2833 .map(|s| RepoPath::from_proto(s))
2834 .collect::<Result<Vec<_>>>()?;
2835
2836 repository_handle
2837 .update(&mut cx, |repository_handle, cx| {
2838 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2839 })
2840 .await?;
2841 Ok(proto::Ack {})
2842 }
2843
    /// Opens the commit-message buffer for a repository on behalf of a remote
    /// peer, shares it with that peer, and returns the buffer's remote id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Replicate the buffer to the requesting peer; prefer the original
        // sender id when the request was forwarded through another peer.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2874
    /// Resolves an askpass (credential) prompt for an in-flight remote git
    /// operation by forwarding the prompt to the registered delegate and
    /// returning the entered secret.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map while prompting so a concurrent
        // request for the same id cannot use it simultaneously.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so it can serve subsequent prompts for the
        // same operation. NOTE(review): on cancellation above the delegate is
        // dropped instead of reinserted — confirm that is intended.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2903
2904 async fn handle_check_for_pushed_commits(
2905 this: Entity<Self>,
2906 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2907 mut cx: AsyncApp,
2908 ) -> Result<proto::CheckForPushedCommitsResponse> {
2909 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2910 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2911
2912 let branches = repository_handle
2913 .update(&mut cx, |repository_handle, _| {
2914 repository_handle.check_for_pushed_commits()
2915 })
2916 .await??;
2917 Ok(proto::CheckForPushedCommitsResponse {
2918 pushed_to: branches
2919 .into_iter()
2920 .map(|commit| commit.to_string())
2921 .collect(),
2922 })
2923 }
2924
2925 async fn handle_git_diff(
2926 this: Entity<Self>,
2927 envelope: TypedEnvelope<proto::GitDiff>,
2928 mut cx: AsyncApp,
2929 ) -> Result<proto::GitDiffResponse> {
2930 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2931 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2932 let diff_type = match envelope.payload.diff_type() {
2933 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2934 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2935 proto::git_diff::DiffType::MergeBase => {
2936 let base_ref = envelope
2937 .payload
2938 .merge_base_ref
2939 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2940 DiffType::MergeBase {
2941 base_ref: base_ref.into(),
2942 }
2943 }
2944 };
2945
2946 let mut diff = repository_handle
2947 .update(&mut cx, |repository_handle, cx| {
2948 repository_handle.diff(diff_type, cx)
2949 })
2950 .await??;
2951 const ONE_MB: usize = 1_000_000;
2952 if diff.len() > ONE_MB {
2953 diff = diff.chars().take(ONE_MB).collect()
2954 }
2955
2956 Ok(proto::GitDiffResponse { diff })
2957 }
2958
2959 async fn handle_tree_diff(
2960 this: Entity<Self>,
2961 request: TypedEnvelope<proto::GetTreeDiff>,
2962 mut cx: AsyncApp,
2963 ) -> Result<proto::GetTreeDiffResponse> {
2964 let repository_id = RepositoryId(request.payload.repository_id);
2965 let diff_type = if request.payload.is_merge {
2966 DiffTreeType::MergeBase {
2967 base: request.payload.base.into(),
2968 head: request.payload.head.into(),
2969 }
2970 } else {
2971 DiffTreeType::Since {
2972 base: request.payload.base.into(),
2973 head: request.payload.head.into(),
2974 }
2975 };
2976
2977 let diff = this
2978 .update(&mut cx, |this, cx| {
2979 let repository = this.repositories().get(&repository_id)?;
2980 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2981 })
2982 .context("missing repository")?
2983 .await??;
2984
2985 Ok(proto::GetTreeDiffResponse {
2986 entries: diff
2987 .entries
2988 .into_iter()
2989 .map(|(path, status)| proto::TreeDiffStatus {
2990 path: path.as_ref().to_proto(),
2991 status: match status {
2992 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2993 TreeDiffStatus::Modified { .. } => {
2994 proto::tree_diff_status::Status::Modified.into()
2995 }
2996 TreeDiffStatus::Deleted { .. } => {
2997 proto::tree_diff_status::Status::Deleted.into()
2998 }
2999 },
3000 oid: match status {
3001 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
3002 Some(old.to_string())
3003 }
3004 TreeDiffStatus::Added => None,
3005 },
3006 })
3007 .collect(),
3008 })
3009 }
3010
3011 async fn handle_get_blob_content(
3012 this: Entity<Self>,
3013 request: TypedEnvelope<proto::GetBlobContent>,
3014 mut cx: AsyncApp,
3015 ) -> Result<proto::GetBlobContentResponse> {
3016 let oid = git::Oid::from_str(&request.payload.oid)?;
3017 let repository_id = RepositoryId(request.payload.repository_id);
3018 let content = this
3019 .update(&mut cx, |this, cx| {
3020 let repository = this.repositories().get(&repository_id)?;
3021 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
3022 })
3023 .context("missing repository")?
3024 .await?;
3025 Ok(proto::GetBlobContentResponse { content })
3026 }
3027
3028 async fn handle_open_unstaged_diff(
3029 this: Entity<Self>,
3030 request: TypedEnvelope<proto::OpenUnstagedDiff>,
3031 mut cx: AsyncApp,
3032 ) -> Result<proto::OpenUnstagedDiffResponse> {
3033 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3034 let diff = this
3035 .update(&mut cx, |this, cx| {
3036 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
3037 Some(this.open_unstaged_diff(buffer, cx))
3038 })
3039 .context("missing buffer")?
3040 .await?;
3041 this.update(&mut cx, |this, _| {
3042 let shared_diffs = this
3043 .shared_diffs
3044 .entry(request.original_sender_id.unwrap_or(request.sender_id))
3045 .or_default();
3046 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
3047 });
3048 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
3049 Ok(proto::OpenUnstagedDiffResponse { staged_text })
3050 }
3051
    /// Opens the uncommitted diff for a buffer on behalf of a remote peer,
    /// retains it in `shared_diffs` so it stays alive for that peer, and
    /// returns whichever base texts (committed / staged) the peer needs.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Keep a strong handle per requesting peer (preferring the original
        // sender when the request was forwarded).
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff's base text is the staged (index) snapshot,
            // when it exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            // Decide which base texts to send:
            // - IndexMatchesHead: index and HEAD share the same base-text
            //   buffer (compared by remote id), so only the committed text
            //   is sent and the peer reuses it for the index.
            // - IndexAndHead: the two may differ; each text is sent when it
            //   exists.
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base text; fall back to sending just the
                // staged text when present.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3112
    /// Message handler: applies new HEAD/index base texts (pushed from the
    /// host) to the local diff state for the given buffer. Silently ignores
    /// buffers without diff state, since nothing needs updating for them.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
3131
    /// Message handler: computes git blame for a buffer, first waiting for the
    /// buffer to catch up to the version the requester saw so line numbers in
    /// the blame result match the requester's view.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3154
3155 async fn handle_get_permalink_to_line(
3156 this: Entity<Self>,
3157 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3158 mut cx: AsyncApp,
3159 ) -> Result<proto::GetPermalinkToLineResponse> {
3160 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3161 // let version = deserialize_version(&envelope.payload.version);
3162 let selection = {
3163 let proto_selection = envelope
3164 .payload
3165 .selection
3166 .context("no selection to get permalink for defined")?;
3167 proto_selection.start as u32..proto_selection.end as u32
3168 };
3169 let buffer = this.read_with(&cx, |this, cx| {
3170 this.buffer_store.read(cx).get_existing(buffer_id)
3171 })?;
3172 let permalink = this
3173 .update(&mut cx, |this, cx| {
3174 this.get_permalink_to_line(&buffer, selection, cx)
3175 })
3176 .await?;
3177 Ok(proto::GetPermalinkToLineResponse {
3178 permalink: permalink.to_string(),
3179 })
3180 }
3181
3182 fn repository_for_request(
3183 this: &Entity<Self>,
3184 id: RepositoryId,
3185 cx: &mut AsyncApp,
3186 ) -> Result<Entity<Repository>> {
3187 this.read_with(cx, |this, _| {
3188 this.repositories
3189 .get(&id)
3190 .context("missing repository handle")
3191 .cloned()
3192 })
3193 }
3194
3195 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3196 self.repositories
3197 .iter()
3198 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3199 .collect()
3200 }
3201
    /// Groups a batch of updated worktree entries by the repository containing
    /// them, converting each to its repository-relative path.
    ///
    /// All sorting and prefix matching runs on the background executor; the
    /// returned task resolves to a map from repository to the repo-relative
    /// paths of the updated entries it owns. Entries covered by nested
    /// repositories are assigned only to the innermost one.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize so the entries can be compared against repository work
        // directories, which are absolute paths.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // Reverse order: the ordered results then arrive most-specific
            // (deepest work directory) first.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to it's innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3281}
3282
3283impl BufferGitState {
    /// Creates empty per-buffer git state; all diffs, base texts, and
    /// conflict-tracking structures start unset and are populated lazily.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Watch channel starts at `false`: no diff recalculation in flight.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }
3306
3307 #[ztracing::instrument(skip_all)]
3308 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3309 self.language = buffer.read(cx).language().cloned();
3310 self.language_changed = true;
3311 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3312 }
3313
    /// Re-parses conflict markers in `buffer` and updates the conflict set
    /// snapshot on the background executor.
    ///
    /// The returned receiver fires once the conflict set has been updated.
    /// When there is no live conflict set, or the set has no conflict, no work
    /// is scheduled and the receiver resolves as canceled (the sender is
    /// dropped here).
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            // Replacing the task cancels any previously scheduled reparse.
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff against the old snapshot off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Notify every caller that was waiting on a reparse.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3365
3366 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3367 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3368 }
3369
3370 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3371 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3372 }
3373
3374 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3375 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3376 }
3377
    /// Translates a proto `UpdateDiffBases` message into a [`DiffBasesChange`]
    /// and applies it, triggering a diff recalculation.
    ///
    /// Messages with an unrecognized mode are dropped silently (forward
    /// compatibility with newer peers).
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            // Index and HEAD are identical, so one text serves as both bases.
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }
3402
3403 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3404 if *self.recalculating_tx.borrow() {
3405 let mut rx = self.recalculating_tx.subscribe();
3406 Some(async move {
3407 loop {
3408 let is_recalculating = rx.recv().await;
3409 if is_recalculating != Some(true) {
3410 break;
3411 }
3412 }
3413 })
3414 } else {
3415 None
3416 }
3417 }
3418
3419 fn diff_bases_changed(
3420 &mut self,
3421 buffer: text::BufferSnapshot,
3422 diff_bases_change: Option<DiffBasesChange>,
3423 cx: &mut Context<Self>,
3424 ) {
3425 match diff_bases_change {
3426 Some(DiffBasesChange::SetIndex(index)) => {
3427 self.index_text = index.map(|mut index| {
3428 text::LineEnding::normalize(&mut index);
3429 Arc::from(index.as_str())
3430 });
3431 self.index_changed = true;
3432 }
3433 Some(DiffBasesChange::SetHead(head)) => {
3434 self.head_text = head.map(|mut head| {
3435 text::LineEnding::normalize(&mut head);
3436 Arc::from(head.as_str())
3437 });
3438 self.head_changed = true;
3439 }
3440 Some(DiffBasesChange::SetBoth(text)) => {
3441 let text = text.map(|mut text| {
3442 text::LineEnding::normalize(&mut text);
3443 Arc::from(text.as_str())
3444 });
3445 self.head_text = text.clone();
3446 self.index_text = text;
3447 self.head_changed = true;
3448 self.index_changed = true;
3449 }
3450 Some(DiffBasesChange::SetEach { index, head }) => {
3451 self.index_text = index.map(|mut index| {
3452 text::LineEnding::normalize(&mut index);
3453 Arc::from(index.as_str())
3454 });
3455 self.index_changed = true;
3456 self.head_text = head.map(|mut head| {
3457 text::LineEnding::normalize(&mut head);
3458 Arc::from(head.as_str())
3459 });
3460 self.head_changed = true;
3461 }
3462 None => {}
3463 }
3464
3465 self.recalculate_diffs(buffer, cx)
3466 }
3467
    /// Recomputes the unstaged, uncommitted, and per-oid diffs for this buffer
    /// against the currently stored base texts, then publishes the new
    /// snapshots to the shared [`BufferDiff`] entities.
    ///
    /// The work is split across `yield_now` points because dropping large diff
    /// snapshots can be expensive. The recalculation is abandoned if new hunk
    /// staging operations arrive while it runs; a later recalculation will
    /// settle the state instead.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        // Signal observers (see `wait_for_recalculation`) that work is in flight.
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality is sufficient: `SetBoth` stores the same Arc in
        // both fields when index and HEAD match.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop bookkeeping for oid diffs whose entities have been released.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        // Replacing the task cancels any previous in-flight recalculation.
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                new_uncommitted_diff = if index_matches_head {
                    // Index and HEAD are identical, so the uncommitted diff is
                    // the same as the unstaged one; reuse it.
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and signal that recalculation is done.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
3661}
3662
/// Builds an askpass delegate that forwards credential prompts from a local
/// git operation to the downstream (remote) client and relays the encrypted
/// response back.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // No downstream client means nobody can answer the prompt; drop it.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext response from memory once forwarded.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3692
3693impl RepositoryId {
3694 pub fn to_proto(self) -> u64 {
3695 self.0
3696 }
3697
3698 pub fn from_proto(id: u64) -> Self {
3699 RepositoryId(id)
3700 }
3701}
3702
3703impl RepositorySnapshot {
    /// Creates an empty snapshot for a repository before its first scan.
    ///
    /// `original_repo_abs_path` defaults to the work directory itself, which
    /// makes the repository count as a main worktree (see `is_main_worktree`).
    fn empty(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
    ) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            original_repo_abs_path: original_repo_abs_path
                .unwrap_or_else(|| work_directory_abs_path.clone()),
            work_directory_abs_path,
            branch: None,
            branch_list: Arc::from([]),
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
            linked_worktrees: Arc::from([]),
            path_style,
        }
    }
3728
    /// Serializes the entire snapshot into an `UpdateRepository` message for a
    /// peer that has no prior state (every status is "updated", none removed).
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3770
    /// Serializes the difference between `old` and this snapshot into an
    /// `UpdateRepository` message, sending only statuses that were added,
    /// changed, or removed since `old`.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        // Merge-walk the two path-sorted status sequences in lockstep.
        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        // Path exists only in the new snapshot: added.
                        Ordering::Less => {
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        // Path exists in both: emit only if the status or diff
                        // stat actually changed.
                        Ordering::Equal => {
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        // Path exists only in the old snapshot: removed.
                        Ordering::Greater => {
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3851
3852 /// The main worktree is the original checkout that other worktrees were
3853 /// created from.
3854 ///
3855 /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
3856 /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
3857 ///
3858 /// Submodules also return `true` here, since they are not linked worktrees.
3859 pub fn is_main_worktree(&self) -> bool {
3860 self.work_directory_abs_path == self.original_repo_abs_path
3861 }
3862
    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }
3870
    /// Returns the git worktrees linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }
3874
    /// Iterates over the status entries of all paths, in path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }
3878
    /// Returns the aggregated git status summary over all paths.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }
3882
    /// Looks up the status entry for a single repo-relative path.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
3888
    /// Looks up the diff statistics for a single repo-relative path, if its
    /// status entry carries them.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }
3894
    /// Converts an absolute path into a path relative to this repository's
    /// work directory; `None` when the path lies outside the repository.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }
3898
    /// Joins a repo-relative path onto the work directory to produce an
    /// absolute path.
    ///
    /// NOTE(review): the `join` result is unwrapped — presumably it only fails
    /// on inputs that cannot occur for valid repo paths; confirm.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }
3905
    /// Strips the work-directory prefix from `abs_path`, yielding the
    /// repo-relative path, or `None` when `abs_path` is outside the work
    /// directory.
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }
3915
    /// Returns whether `repo_path` was recorded as conflicted the last time
    /// the merge heads changed, even if its current status no longer is.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }
3921
3922 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3923 let had_conflict_on_last_merge_head_change = self
3924 .merge
3925 .merge_heads_by_conflicted_path
3926 .contains_key(repo_path);
3927 let has_conflict_currently = self
3928 .status_for_path(repo_path)
3929 .is_some_and(|entry| entry.status.is_conflicted());
3930 had_conflict_on_last_merge_head_change || has_conflict_currently
3931 }
3932
3933 /// This is the name that will be displayed in the repository selector for this repository.
3934 pub fn display_name(&self) -> SharedString {
3935 self.work_directory_abs_path
3936 .file_name()
3937 .unwrap_or_default()
3938 .to_string_lossy()
3939 .to_string()
3940 .into()
3941 }
3942}
3943
/// Serializes a [`StashEntry`] into its protobuf representation.
pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
    proto::StashEntry {
        oid: entry.oid.as_bytes().to_vec(),
        message: entry.message.clone(),
        branch: entry.branch.clone(),
        index: entry.index as u64,
        timestamp: entry.timestamp,
    }
}
3953
/// Deserializes a protobuf stash entry; fails when the oid bytes are invalid.
pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
    Ok(StashEntry {
        oid: Oid::from_bytes(&entry.oid)?,
        message: entry.message.clone(),
        index: entry.index as usize,
        branch: entry.branch.clone(),
        timestamp: entry.timestamp,
    })
}
3963
3964impl MergeDetails {
3965 async fn update(
3966 &mut self,
3967 backend: &Arc<dyn GitRepository>,
3968 current_conflicted_paths: Vec<RepoPath>,
3969 ) -> Result<bool> {
3970 log::debug!("load merge details");
3971 self.message = backend.merge_message().await.map(SharedString::from);
3972 let heads = backend
3973 .revparse_batch(vec![
3974 "MERGE_HEAD".into(),
3975 "CHERRY_PICK_HEAD".into(),
3976 "REBASE_HEAD".into(),
3977 "REVERT_HEAD".into(),
3978 "APPLY_HEAD".into(),
3979 ])
3980 .await
3981 .log_err()
3982 .unwrap_or_default()
3983 .into_iter()
3984 .map(|opt| opt.map(SharedString::from))
3985 .collect::<Vec<_>>();
3986
3987 let mut conflicts_changed = false;
3988
3989 // Record the merge state for newly conflicted paths
3990 for path in ¤t_conflicted_paths {
3991 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3992 conflicts_changed = true;
3993 self.merge_heads_by_conflicted_path
3994 .insert(path.clone(), heads.clone());
3995 }
3996 }
3997
3998 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3999 self.merge_heads_by_conflicted_path
4000 .retain(|path, old_merge_heads| {
4001 let keep = current_conflicted_paths.contains(path)
4002 || (old_merge_heads == &heads
4003 && old_merge_heads.iter().any(|head| head.is_some()));
4004 if !keep {
4005 conflicts_changed = true;
4006 }
4007 keep
4008 });
4009
4010 Ok(conflicts_changed)
4011 }
4012}
4013
4014impl Repository {
4015 pub fn is_trusted(&self) -> bool {
4016 match self.repository_state.peek() {
4017 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
4018 _ => false,
4019 }
4020 }
4021
    /// Returns a clone of the current repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
4025
    /// Iterates over the pending git operations for all paths.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
4029
    /// Returns the aggregated summary of all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
4033
    /// Looks up the pending git operations for a single repo-relative path.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
4039
    /// Constructs a repository backed by a local git checkout.
    ///
    /// Backend initialization runs asynchronously and is shared between the
    /// git worker and the repository state future, so callers never block.
    /// A self-subscription invalidates cached commit-graph data when the
    /// head, branch list, or stash changes after the initial scan.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // Initialize the local backend off the main thread; the error is
        // stringified so the shared future's output is cloneable.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Drop cached graph data when repository history changes, but only
        // after the initial scan (scan_id > 1) so startup stays cheap.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4113
    /// Constructs a repository whose git operations are proxied over RPC to a
    /// remote host; the state is available immediately (no async init).
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4151
    /// Returns the owning `GitStore`, or `None` if it has been dropped.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4155
    /// Re-reads the index and HEAD texts for every open buffer that has a
    /// diff registered in the `GitStore`, and applies any changes to the
    /// corresponding diff states, forwarding them to downstream clients when
    /// the project is shared.
    ///
    /// Keyed on `GitJobKey::ReloadBufferDiffBases` so a burst of repository
    /// events coalesces into fewer reloads.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                // Only local repositories can read index/HEAD contents directly.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Snapshot, for each tracked buffer in this repository, which
                // diff bases (index and/or HEAD) are in use and their current
                // cached text, so the comparison can run off the main thread.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                // Skips buffers that belong to a different repository.
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // On a background thread, load the fresh index/HEAD text per
                // buffer and compute which bases actually changed.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            // Only reload a base that some diff is actually using.
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // When index and HEAD agree, a single
                                            // SetBoth avoids storing the text twice.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Back on the main thread: apply each change to its diff state
                // and mirror it to the downstream client, if any.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4315
    /// Enqueues an unkeyed job on this repository's git worker.
    ///
    /// `status`, when provided, is shown as an in-progress message while the
    /// job runs. The returned receiver yields the job's result, or `Canceled`
    /// if the worker (or this repository) is dropped first.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4328
    /// Enqueues a job on the git worker, optionally keyed so the worker can
    /// coalesce or order jobs that share the same `key`.
    ///
    /// While the job runs, `status` (if any) is recorded in `active_jobs`
    /// keyed by a fresh job id so the UI can display it; the entry is removed
    /// when the job completes. The result is delivered on the returned
    /// oneshot receiver; send/receive failures are deliberately ignored.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the in-progress status before awaiting the job.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Clear the status even if the repository entity is
                        // gone (`.ok()` swallows the dropped-entity error).
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The caller may have dropped the receiver; that's fine.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4378
    /// Marks this repository as the project's active one and notifies
    /// `GitStore` observers. Does nothing if the store has been dropped or
    /// this repository is no longer registered in it.
    pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
        let Some(git_store) = self.git_store.upgrade() else {
            return;
        };
        let entity = cx.entity();
        git_store.update(cx, |git_store, cx| {
            // Look up our registration id by entity identity, since the
            // repository doesn't store its own key in the store's map.
            let Some((&id, _)) = git_store
                .repositories
                .iter()
                .find(|(_, handle)| *handle == &entity)
            else {
                return;
            };
            git_store.active_repo_id = Some(id);
            cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
        });
    }
4396
    /// Iterates the status entries from the last scanned snapshot.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4400
    /// Returns the cached diff statistics for `path`, if any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4404
    /// Returns a clone of the stash entries from the last scanned snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4408
4409 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4410 let git_store = self.git_store.upgrade()?;
4411 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4412 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4413 let abs_path = SanitizedPath::new(&abs_path);
4414 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4415 Some(ProjectPath {
4416 worktree_id: worktree.read(cx).id(),
4417 path: relative_path,
4418 })
4419 }
4420
4421 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4422 let git_store = self.git_store.upgrade()?;
4423 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4424 let abs_path = worktree_store.absolutize(path, cx)?;
4425 self.snapshot.abs_path_to_repo_path(&abs_path)
4426 }
4427
4428 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4429 other
4430 .read(cx)
4431 .snapshot
4432 .work_directory_abs_path
4433 .starts_with(&self.snapshot.work_directory_abs_path)
4434 }
4435
    /// Returns (creating if needed) the buffer used for composing commit
    /// messages. Local repositories create a fresh local buffer; remote ones
    /// request the shared buffer from the host over RPC. The buffer is cached
    /// in `commit_message_buffer` and reused on subsequent calls.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Reuse the cached buffer if we already opened one.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Ask the host to open (or create) its commit buffer,
                    // then wait for it to be replicated to us.
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache for reuse by later calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4488
    /// Creates a new local buffer for commit messages, assigning the
    /// "Git Commit" language when a registry is available, and caches it on
    /// the repository.
    fn open_local_commit_buffer(
        language_registry: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        cx.spawn(async move |repository, cx| {
            let git_commit_language = match language_registry {
                Some(language_registry) => {
                    Some(language_registry.language_for_name("Git Commit").await?)
                }
                None => None,
            };
            let buffer = buffer_store
                .update(cx, |buffer_store, cx| {
                    buffer_store.create_buffer(git_commit_language, false, cx)
                })
                .await?;

            // Cache so `open_commit_buffer` can reuse it next time.
            repository.update(cx, |repository, _| {
                repository.commit_message_buffer = Some(buffer.clone());
            })?;
            Ok(buffer)
        })
    }
4513
    /// Restores `paths` to their state at `commit` (`git checkout <commit>
    /// -- <paths>`), tracking the paths as pending "Reverted" operations
    /// while the job runs. Dispatches to the local backend or the remote
    /// peer as appropriate.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        // Status line surfaced in the UI while running.
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4568
    /// Performs `git reset <commit>` with the given mode, locally or over
    /// RPC. Only `Soft` and `Mixed` are representable in the proto message.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4602
    /// Fetches the details (sha, message, timestamp, author) of `commit`,
    /// locally or via the `GitShow` RPC.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4630
    /// Loads the full diff of `commit` (old/new text for every changed
    /// file), locally or via the `LoadCommitDiff` RPC.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    // Path decoding is fallible; any bad path
                                    // fails the whole diff via collect below.
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4666
    /// Loads the entire commit history of `path` (no pagination: skip 0,
    /// no limit).
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4673
    /// Loads a page of `path`'s commit history, skipping the first `skip`
    /// entries and returning at most `limit` (all remaining when `None`).
    /// Served by the local backend or the `GitFileHistory` RPC.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        // The server may have followed renames; use its path.
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4715
    /// Returns the cached graph data for `(log_source, log_order)` without
    /// triggering a fetch (see `graph_data` for the fetching variant).
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4723
4724 pub fn search_commits(
4725 &mut self,
4726 log_source: LogSource,
4727 search_args: SearchCommitArgs,
4728 request_tx: smol::channel::Sender<Oid>,
4729 cx: &mut Context<Self>,
4730 ) {
4731 let repository_state = self.repository_state.clone();
4732
4733 cx.background_spawn(async move {
4734 let repo_state = repository_state.await;
4735
4736 match repo_state {
4737 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4738 backend
4739 .search_commits(log_source, search_args, request_tx)
4740 .await
4741 .log_err();
4742 }
4743 Ok(RepositoryState::Remote(_)) => {}
4744 Err(_) => {}
4745 };
4746 })
4747 .detach();
4748 }
4749
    /// Returns the slice of graph commits for `range`, kicking off a
    /// background fetch for `(log_source, log_order)` on first access.
    ///
    /// While the fetch is in flight `is_loading` is true and `commits` holds
    /// whatever has streamed in so far.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // The fetch task streams commits into this entry; dropping
                // the entry (e.g. on HEAD change) cancels the task with it.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record the failure on the entry so the UI can show it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the data loaded so far.
        // NOTE(review): when `range.start >= len` the clamp can still yield
        // the final element, and a start past a short `range.end` could
        // still panic — presumably callers pass well-formed ranges; confirm.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4819
    /// Streams initial graph data from the local `backend` into the
    /// repository's `initial_graph_data` entry for `(log_source, log_order)`,
    /// emitting a `CountUpdated` graph event for each received batch.
    ///
    /// Returns once the backend finishes (or the repository entity is
    /// dropped), propagating any backend error as a displayable string.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        // The backend pushes batches of commits through this channel while
        // it walks the log on a background thread.
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches until the backend closes the channel.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                // Only modify an existing entry; never create one here —
                // the entry owns the task, so a missing entry means we
                // should already have been cancelled.
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Surface any error from the backend walk itself.
        task.await?;
        Ok(())
    }
4873
    /// Returns the (possibly still loading) commit data for `sha`,
    /// requesting it from the graph-commit handler on a cache miss.
    ///
    /// If the handler is closed it is (re)opened; the request is then
    /// retried naturally on the next call, so this may report `Loading`
    /// before a request is actually in flight.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark as loading once the request was accepted;
                    // a failed send means the handler is shutting down.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4894
    /// Spins up the commit-data handler: a background loop that reads commit
    /// details on demand plus a foreground task that stores results into
    /// `commit_data` and notifies observers.
    ///
    /// The background loop exits (and the handler transitions back to
    /// `Closed`) when either side of its channels disconnects or when no
    /// request arrives for ten seconds.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Bounded result channel applies backpressure to the reader loop.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Mark the handler closed so the next fetch reopens it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Fresh idle timer per iteration: ten quiet seconds shuts
                // the reader down; the handler reopens on the next fetch.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // A single bad commit shouldn't kill the loop.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task finish
            // and flip the handler state back to Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4985
    /// Returns the project's `BufferStore` via the owning `GitStore`, or
    /// `None` if the store has been dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4989
4990 fn save_buffers<'a>(
4991 &self,
4992 entries: impl IntoIterator<Item = &'a RepoPath>,
4993 cx: &mut Context<Self>,
4994 ) -> Vec<Task<anyhow::Result<()>>> {
4995 let mut save_futures = Vec::new();
4996 if let Some(buffer_store) = self.buffer_store(cx) {
4997 buffer_store.update(cx, |buffer_store, cx| {
4998 for path in entries {
4999 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
5000 continue;
5001 };
5002 if let Some(buffer) = buffer_store.get_by_path(&project_path)
5003 && buffer
5004 .read(cx)
5005 .file()
5006 .is_some_and(|file| file.disk_state().exists())
5007 && buffer.read(cx).has_unsaved_edits()
5008 {
5009 save_futures.push(buffer_store.save_buffer(buffer, cx));
5010 }
5011 }
5012 })
5013 }
5014 save_futures
5015 }
5016
    /// Stages the given paths (`git add`), saving dirty buffers first.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
5024
    /// Unstages the given paths (`git reset`), saving dirty buffers first.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
5032
    /// Shared implementation for staging (`stage == true`) and unstaging
    /// paths. Saves dirty buffers first, optimistically updates each open
    /// buffer's uncommitted diff so the UI responds immediately, then
    /// performs the actual index write locally or over RPC; on failure the
    /// optimistic hunk state is rolled back.
    ///
    /// Keyed on `GitJobKey::WriteIndex(entries)` so writes to the same set
    /// of paths are serialized, and tracked as pending Staged/Unstaged ops.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line mimicking the equivalent CLI command.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Persist buffer contents before touching the index so the
                // index reflects what the user sees.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip all hunks in each affected
                            // buffer's uncommitted diff, remembering each diff
                            // state's operation count so we can reconcile later.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the real index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic update: record the
                            // write's operation count on success, or clear
                            // the pending hunks on failure.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5217
5218 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5219 let snapshot = self.snapshot.clone();
5220 let pending_ops = self.pending_ops.clone();
5221 let to_stage = cx.background_spawn(async move {
5222 snapshot
5223 .status()
5224 .filter_map(|entry| {
5225 if let Some(ops) =
5226 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5227 {
5228 if ops.staging() || ops.staged() {
5229 None
5230 } else {
5231 Some(entry.repo_path)
5232 }
5233 } else if entry.status.staging().is_fully_staged() {
5234 None
5235 } else {
5236 Some(entry.repo_path)
5237 }
5238 })
5239 .collect()
5240 });
5241
5242 cx.spawn(async move |this, cx| {
5243 let to_stage = to_stage.await;
5244 this.update(cx, |this, cx| {
5245 this.stage_or_unstage_entries(true, to_stage, cx)
5246 })?
5247 .await
5248 })
5249 }
5250
5251 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5252 let snapshot = self.snapshot.clone();
5253 let pending_ops = self.pending_ops.clone();
5254 let to_unstage = cx.background_spawn(async move {
5255 snapshot
5256 .status()
5257 .filter_map(|entry| {
5258 if let Some(ops) =
5259 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5260 {
5261 if !ops.staging() && !ops.staged() {
5262 None
5263 } else {
5264 Some(entry.repo_path)
5265 }
5266 } else if entry.status.staging().is_fully_unstaged() {
5267 None
5268 } else {
5269 Some(entry.repo_path)
5270 }
5271 })
5272 .collect()
5273 });
5274
5275 cx.spawn(async move |this, cx| {
5276 let to_unstage = to_unstage.await;
5277 this.update(cx, |this, cx| {
5278 this.stage_or_unstage_entries(false, to_unstage, cx)
5279 })?
5280 .await
5281 })
5282 }
5283
    /// Stashes every path present in the cached status.
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5289
    /// Stashes the given paths, either via the local git backend or by
    /// forwarding a `Stash` request to the remote host.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` handles a dropped entity; `??` unwraps the job's
            // channel result and then the git operation's result.
            .await??;
            Ok(())
        })
    }
5326
    /// Pops a stash entry; `index` selects a specific entry, `None` pops the
    /// most recent one.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    // Proto carries the index as u64; `None`
                                    // means "latest entry".
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5360
    /// Applies a stash entry without removing it from the stash; `index`
    /// selects a specific entry, `None` applies the most recent one.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5394
5395 pub fn stash_drop(
5396 &mut self,
5397 index: Option<usize>,
5398 cx: &mut Context<Self>,
5399 ) -> oneshot::Receiver<anyhow::Result<()>> {
5400 let id = self.id;
5401 let updates_tx = self
5402 .git_store()
5403 .and_then(|git_store| match &git_store.read(cx).state {
5404 GitStoreState::Local { downstream, .. } => downstream
5405 .as_ref()
5406 .map(|downstream| downstream.updates_tx.clone()),
5407 _ => None,
5408 });
5409 let this = cx.weak_entity();
5410 self.send_job(None, move |git_repo, mut cx| async move {
5411 match git_repo {
5412 RepositoryState::Local(LocalRepositoryState {
5413 backend,
5414 environment,
5415 ..
5416 }) => {
5417 // TODO would be nice to not have to do this manually
5418 let result = backend.stash_drop(index, environment).await;
5419 if result.is_ok()
5420 && let Ok(stash_entries) = backend.stash_entries().await
5421 {
5422 let snapshot = this.update(&mut cx, |this, cx| {
5423 this.snapshot.stash_entries = stash_entries;
5424 cx.emit(RepositoryEvent::StashEntriesChanged);
5425 this.snapshot.clone()
5426 })?;
5427 if let Some(updates_tx) = updates_tx {
5428 updates_tx
5429 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5430 .ok();
5431 }
5432 }
5433
5434 result
5435 }
5436 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5437 client
5438 .request(proto::StashDrop {
5439 project_id: project_id.0,
5440 repository_id: id.to_proto(),
5441 stash_index: index.map(|i| i as u64),
5442 })
5443 .await
5444 .context("sending stash pop request")?;
5445 Ok(())
5446 }
5447 }
5448 })
5449 }
5450
    /// Runs the given git hook, either via the local backend or by forwarding
    /// a `RunGitHook` request to the remote host.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            // Status label shown while the job runs, e.g. "git hook pre-commit".
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5477
    /// Creates a commit with the given message and options, running the
    /// pre-commit hook first.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook before the commit job; its receiver is
        // awaited inside the job, so a hook failure aborts the commit.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate so the host can route
                    // credential prompts back to this client; always remove
                    // it again when the request completes.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5533
    /// Fetches from a remote, returning the command's stdout/stderr output.
    /// Credential prompts are served through the askpass delegate.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate for the duration of the
                    // request so the host can forward credential prompts.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5575
5576 pub fn push(
5577 &mut self,
5578 branch: SharedString,
5579 remote_branch: SharedString,
5580 remote: SharedString,
5581 options: Option<PushOptions>,
5582 askpass: AskPassDelegate,
5583 cx: &mut Context<Self>,
5584 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5585 let askpass_delegates = self.askpass_delegates.clone();
5586 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5587 let id = self.id;
5588
5589 let args = options
5590 .map(|option| match option {
5591 PushOptions::SetUpstream => " --set-upstream",
5592 PushOptions::Force => " --force-with-lease",
5593 })
5594 .unwrap_or("");
5595
5596 let updates_tx = self
5597 .git_store()
5598 .and_then(|git_store| match &git_store.read(cx).state {
5599 GitStoreState::Local { downstream, .. } => downstream
5600 .as_ref()
5601 .map(|downstream| downstream.updates_tx.clone()),
5602 _ => None,
5603 });
5604
5605 let this = cx.weak_entity();
5606 self.send_job(
5607 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5608 move |git_repo, mut cx| async move {
5609 match git_repo {
5610 RepositoryState::Local(LocalRepositoryState {
5611 backend,
5612 environment,
5613 ..
5614 }) => {
5615 let result = backend
5616 .push(
5617 branch.to_string(),
5618 remote_branch.to_string(),
5619 remote.to_string(),
5620 options,
5621 askpass,
5622 environment.clone(),
5623 cx.clone(),
5624 )
5625 .await;
5626 // TODO would be nice to not have to do this manually
5627 if result.is_ok() {
5628 let branches = backend.branches().await?;
5629 let branch = branches.into_iter().find(|branch| branch.is_head);
5630 log::info!("head branch after scan is {branch:?}");
5631 let snapshot = this.update(&mut cx, |this, cx| {
5632 this.snapshot.branch = branch;
5633 cx.emit(RepositoryEvent::HeadChanged);
5634 this.snapshot.clone()
5635 })?;
5636 if let Some(updates_tx) = updates_tx {
5637 updates_tx
5638 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5639 .ok();
5640 }
5641 }
5642 result
5643 }
5644 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5645 askpass_delegates.lock().insert(askpass_id, askpass);
5646 let _defer = util::defer(|| {
5647 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5648 debug_assert!(askpass_delegate.is_some());
5649 });
5650 let response = client
5651 .request(proto::Push {
5652 project_id: project_id.0,
5653 repository_id: id.to_proto(),
5654 askpass_id,
5655 branch_name: branch.to_string(),
5656 remote_branch_name: remote_branch.to_string(),
5657 remote_name: remote.to_string(),
5658 options: options.map(|options| match options {
5659 PushOptions::Force => proto::push::PushOptions::Force,
5660 PushOptions::SetUpstream => {
5661 proto::push::PushOptions::SetUpstream
5662 }
5663 }
5664 as i32),
5665 })
5666 .await?;
5667
5668 Ok(RemoteCommandOutput {
5669 stdout: response.stdout,
5670 stderr: response.stderr,
5671 })
5672 }
5673 }
5674 },
5675 )
5676 }
5677
    /// Pulls from `remote` (optionally a specific `branch`), with or without
    /// `--rebase`, returning the command's stdout/stderr output.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build a human-readable status string that mirrors the actual git
        // invocation, e.g. "git pull --rebase origin main".
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate for the duration of the
                    // request so credential prompts can be forwarded.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5742
    /// Writes `content` into the git index for `path` (`None` removes the
    /// entry's index text). Jobs are keyed on the path so writes to the same
    /// file are serialized. When `hunk_staging_operation_count` is given, it
    /// is recorded on the corresponding buffer's diff state after the write,
    /// so later diff recalculations can tell which hunk-staging operations
    /// this write already reflects.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            // Key by path: concurrent index writes for one file must not race.
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the file's executable bit in the index;
                        // a missing or unreadable file is treated as
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // NOTE(review): the remote request carries no
                        // executable bit; presumably the host computes it
                        // itself — confirm against the host-side handler.
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    // Record the operation count on the buffer's diff state;
                    // inner `?`s (Option) silently skip if the buffer or its
                    // diff state no longer exists.
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5821
    /// Adds a new remote (`git remote add <name> <url>`).
    pub fn create_remote(
        &mut self,
        remote_name: String,
        remote_url: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git remote add {remote_name} {remote_url}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.create_remote(remote_name, remote_url).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitCreateRemote {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                remote_name,
                                remote_url,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5851
5852 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5853 let id = self.id;
5854 self.send_job(
5855 Some(format!("git remove remote {remote_name}").into()),
5856 move |repo, _cx| async move {
5857 match repo {
5858 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5859 backend.remove_remote(remote_name).await
5860 }
5861 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5862 client
5863 .request(proto::GitRemoveRemote {
5864 project_id: project_id.0,
5865 repository_id: id.to_proto(),
5866 remote_name,
5867 })
5868 .await?;
5869
5870 Ok(())
5871 }
5872 }
5873 },
5874 )
5875 }
5876
    /// Returns the remotes relevant for `branch_name`: the branch's push
    /// remote (if `is_push`) or its upstream remote, falling back to all
    /// configured remotes when the branch has none (or no branch is given).
    pub fn get_remotes(
        &mut self,
        branch_name: Option<String>,
        is_push: bool,
    ) -> oneshot::Receiver<Result<Vec<Remote>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let remote = if let Some(branch_name) = branch_name {
                        if is_push {
                            backend.get_push_remote(branch_name).await?
                        } else {
                            backend.get_branch_remote(branch_name).await?
                        }
                    } else {
                        None
                    };

                    match remote {
                        // A branch-specific remote wins; otherwise list all.
                        Some(remote) => Ok(vec![remote]),
                        None => backend.get_all_remotes().await,
                    }
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GetRemotes {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                            is_push,
                        })
                        .await?;

                    let remotes = response
                        .remotes
                        .into_iter()
                        .map(|remotes| Remote {
                            name: remotes.name.into(),
                        })
                        .collect();

                    Ok(remotes)
                }
            }
        })
    }
5924
    /// Lists the repository's branches, locally or via a `GitGetBranches`
    /// request to the remote host.
    pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.branches().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetBranches {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|branch| proto_to_branch(&branch))
                        .collect();

                    Ok(branches)
                }
            }
        })
    }
5951
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path for the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree's working directory differs from the path of the
        // repository's original (main) checkout.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5963
5964 pub fn path_for_new_linked_worktree(
5965 &self,
5966 branch_name: &str,
5967 worktree_directory_setting: &str,
5968 ) -> Result<PathBuf> {
5969 let original_repo = self.original_repo_abs_path.clone();
5970 let project_name = original_repo
5971 .file_name()
5972 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5973 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5974 Ok(directory.join(branch_name).join(project_name))
5975 }
5976
    /// Lists the repository's git worktrees, locally or via a
    /// `GitGetWorktrees` request to the remote host.
    pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.worktrees().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetWorktrees {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let worktrees = response
                        .worktrees
                        .into_iter()
                        .map(|worktree| proto_to_worktree(&worktree))
                        .collect();

                    Ok(worktrees)
                }
            }
        })
    }
6003
    /// Adds a git worktree at `path`, either on a new branch or detached at
    /// `commit`. Shared implementation behind `create_worktree` and
    /// `create_worktree_detached`.
    fn create_worktree_with_start_point(
        &mut self,
        start_point: CreateWorktreeStartPoint,
        path: PathBuf,
        commit: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let message = match &start_point {
            CreateWorktreeStartPoint::Detached => "git worktree add (detached)".into(),
            CreateWorktreeStartPoint::Branched { name } => {
                format!("git worktree add: {name}").into()
            }
        };

        self.send_job(Some(message), move |repo, _cx| async move {
            // `None` branch name means a detached worktree.
            let branch_name = match start_point {
                CreateWorktreeStartPoint::Detached => None,
                CreateWorktreeStartPoint::Branched { name } => Some(name),
            };

            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_worktree(branch_name, path, commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateWorktree {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            name: branch_name,
                            directory: path.to_string_lossy().to_string(),
                            commit,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6044
6045 pub fn create_worktree(
6046 &mut self,
6047 branch_name: String,
6048 path: PathBuf,
6049 commit: Option<String>,
6050 ) -> oneshot::Receiver<Result<()>> {
6051 self.create_worktree_with_start_point(
6052 CreateWorktreeStartPoint::Branched { name: branch_name },
6053 path,
6054 commit,
6055 )
6056 }
6057
6058 pub fn create_worktree_detached(
6059 &mut self,
6060 path: PathBuf,
6061 commit: String,
6062 ) -> oneshot::Receiver<Result<()>> {
6063 self.create_worktree_with_start_point(
6064 CreateWorktreeStartPoint::Detached,
6065 path,
6066 Some(commit),
6067 )
6068 }
6069
    /// Returns the SHA of the current HEAD commit, or `None` if there is no
    /// HEAD (e.g. an empty repository).
    pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    // The local lookup itself is infallible; a missing HEAD
                    // surfaces as `Ok(None)`.
                    Ok(backend.head_sha().await)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetHeadSha {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    Ok(response.sha)
                }
            }
        })
    }
6090
6091 pub fn update_ref(
6092 &mut self,
6093 ref_name: String,
6094 commit: String,
6095 ) -> oneshot::Receiver<Result<()>> {
6096 self.send_job(None, move |repo, _cx| async move {
6097 match repo {
6098 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6099 backend.update_ref(ref_name, commit).await
6100 }
6101 RepositoryState::Remote(_) => {
6102 anyhow::bail!("update_ref is not supported for remote repositories")
6103 }
6104 }
6105 })
6106 }
6107
6108 pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
6109 self.send_job(None, move |repo, _cx| async move {
6110 match repo {
6111 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6112 backend.delete_ref(ref_name).await
6113 }
6114 RepositoryState::Remote(_) => {
6115 anyhow::bail!("delete_ref is not supported for remote repositories")
6116 }
6117 }
6118 })
6119 }
6120
6121 pub fn stage_all_including_untracked(&mut self) -> oneshot::Receiver<Result<()>> {
6122 self.send_job(None, move |repo, _cx| async move {
6123 match repo {
6124 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6125 backend.stage_all_including_untracked().await
6126 }
6127 RepositoryState::Remote(_) => {
6128 anyhow::bail!(
6129 "stage_all_including_untracked is not supported for remote repositories"
6130 )
6131 }
6132 }
6133 })
6134 }
6135
    /// Removes the git worktree at `path` (`git worktree remove`); `force`
    /// removes it even with uncommitted changes.
    pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree remove: {}", path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.remove_worktree(path, force).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRemoveWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_string_lossy().to_string(),
                                force,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6161
    /// Moves the git worktree at `old_path` to `new_path`
    /// (`git worktree move`).
    pub fn rename_worktree(
        &mut self,
        old_path: PathBuf,
        new_path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree move: {}", old_path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_worktree(old_path, new_path).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                old_path: old_path.to_string_lossy().to_string(),
                                new_path: new_path.to_string_lossy().to_string(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6191
    /// Returns the repository's default branch name (or `None` if it cannot
    /// be determined); `include_remote_name` controls whether the remote
    /// prefix is included.
    pub fn default_branch(
        &mut self,
        include_remote_name: bool,
    ) -> oneshot::Receiver<Result<Option<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.default_branch(include_remote_name).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `include_remote_name` is not forwarded in
                    // the remote request — confirm the host-side default
                    // matches the caller's expectation.
                    let response = client
                        .request(proto::GetDefaultBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    anyhow::Ok(response.branch.map(SharedString::from))
                }
            }
        })
    }
6215
    /// Computes a tree-level diff (per-path added/modified/deleted statuses)
    /// for the given `diff_type`, locally or via a `GetTreeDiff` request.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Decode proto entries; entries with an unparsable oid or
                    // path are logged and dropped rather than failing the
                    // whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6275
    /// Produces a textual diff for the given `diff_type` (head↔index,
    /// head↔worktree, or against a merge base), locally or via a `GitDiff`
    /// request to the remote host.
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Translate the diff type to proto; only MergeBase
                    // carries an extra ref.
                    let (proto_diff_type, merge_base_ref) = match &diff_type {
                        DiffType::HeadToIndex => {
                            (proto::git_diff::DiffType::HeadToIndex.into(), None)
                        }
                        DiffType::HeadToWorktree => {
                            (proto::git_diff::DiffType::HeadToWorktree.into(), None)
                        }
                        DiffType::MergeBase { base_ref } => (
                            proto::git_diff::DiffType::MergeBase.into(),
                            Some(base_ref.to_string()),
                        ),
                    };
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            diff_type: proto_diff_type,
                            merge_base_ref,
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
6310
    /// Creates and switches to a new branch (`git switch -c`), optionally
    /// starting from `base_branch`.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `base_branch` is not forwarded in the
                    // remote request, so a remote create presumably always
                    // branches from the current HEAD — confirm whether the
                    // proto supports a base field.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6341
    /// Switches to an existing branch (`git switch <name>`).
    pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git switch {branch_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.change_branch(branch_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitChangeBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                branch_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6366
    /// Deletes a branch; `is_remote` deletes a remote-tracking branch
    /// (`git branch -dr`) instead of a local one (`git branch -d`).
    pub fn delete_branch(
        &mut self,
        is_remote: bool,
        branch_name: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(
                format!(
                    "git branch {} {}",
                    if is_remote { "-dr" } else { "-d" },
                    branch_name
                )
                .into(),
            ),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(state) => {
                        state.backend.delete_branch(is_remote, branch_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitDeleteBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                is_remote,
                                branch_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6403
    /// Renames `branch` to `new_name` (`git branch -m`).
    pub fn rename_branch(
        &mut self,
        branch: String,
        new_name: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git branch -m {branch} {new_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_branch(branch, new_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                branch,
                                new_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6433
    /// Checks which branches the current commit has been pushed to, returning
    /// their names.
    pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.check_for_pushed_commit().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::CheckForPushedCommits {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response.pushed_to.into_iter().map(Into::into).collect();

                    Ok(branches)
                }
            }
        })
    }
6456
    /// Creates a repository checkpoint and returns a handle (the checkpoint
    /// commit's SHA) that can later be passed to `restore_checkpoint`.
    pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.checkpoint().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitCreateCheckpoint {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    Ok(GitRepositoryCheckpoint {
                        // The SHA travels over the wire as raw bytes.
                        commit_sha: Oid::from_bytes(&response.commit_sha)?,
                    })
                }
            }
        })
    }
6479
6480 pub fn restore_checkpoint(
6481 &mut self,
6482 checkpoint: GitRepositoryCheckpoint,
6483 ) -> oneshot::Receiver<Result<()>> {
6484 let id = self.id;
6485 self.send_job(None, move |repo, _cx| async move {
6486 match repo {
6487 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6488 backend.restore_checkpoint(checkpoint).await
6489 }
6490 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6491 client
6492 .request(proto::GitRestoreCheckpoint {
6493 project_id: project_id.0,
6494 repository_id: id.to_proto(),
6495 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6496 })
6497 .await?;
6498 Ok(())
6499 }
6500 }
6501 })
6502 }
6503
    /// Applies an `UpdateRepository` message from the upstream (host) project
    /// to this replica's snapshot, emitting a `RepositoryEvent` for each piece
    /// of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to deserialize are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Statuses arrive as a delta: removals first, then inserts/updates.
        // Entries that fail to convert are skipped (logged via `log_err`).
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only advance the scan id once the final message of a batch lands.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6587
6588 pub fn compare_checkpoints(
6589 &mut self,
6590 left: GitRepositoryCheckpoint,
6591 right: GitRepositoryCheckpoint,
6592 ) -> oneshot::Receiver<Result<bool>> {
6593 let id = self.id;
6594 self.send_job(None, move |repo, _cx| async move {
6595 match repo {
6596 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6597 backend.compare_checkpoints(left, right).await
6598 }
6599 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6600 let response = client
6601 .request(proto::GitCompareCheckpoints {
6602 project_id: project_id.0,
6603 repository_id: id.to_proto(),
6604 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6605 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6606 })
6607 .await?;
6608 Ok(response.equal)
6609 }
6610 }
6611 })
6612 }
6613
6614 pub fn diff_checkpoints(
6615 &mut self,
6616 base_checkpoint: GitRepositoryCheckpoint,
6617 target_checkpoint: GitRepositoryCheckpoint,
6618 ) -> oneshot::Receiver<Result<String>> {
6619 let id = self.id;
6620 self.send_job(None, move |repo, _cx| async move {
6621 match repo {
6622 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6623 backend
6624 .diff_checkpoints(base_checkpoint, target_checkpoint)
6625 .await
6626 }
6627 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6628 let response = client
6629 .request(proto::GitDiffCheckpoints {
6630 project_id: project_id.0,
6631 repository_id: id.to_proto(),
6632 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
6633 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
6634 })
6635 .await?;
6636 Ok(response.diff)
6637 }
6638 }
6639 })
6640 }
6641
    /// Drops every pending op that is no longer running, keeping only paths
    /// that still have in-flight ops, and notifies observers if the set changed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree with only still-running ops; paths whose ops have
        // all completed are removed entirely.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): this emits the *previous* pending ops (the field is
            // only replaced below) — confirm listeners expect the old set here
            // rather than `updated`.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6667
6668 fn schedule_scan(
6669 &mut self,
6670 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
6671 cx: &mut Context<Self>,
6672 ) {
6673 let this = cx.weak_entity();
6674 let _ = self.send_keyed_job(
6675 Some(GitJobKey::ReloadGitState),
6676 None,
6677 |state, mut cx| async move {
6678 log::debug!("run scheduled git status scan");
6679
6680 let Some(this) = this.upgrade() else {
6681 return Ok(());
6682 };
6683 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
6684 bail!("not a local repository")
6685 };
6686 let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
6687 this.update(&mut cx, |this, cx| {
6688 this.clear_pending_ops(cx);
6689 });
6690 if let Some(updates_tx) = updates_tx {
6691 updates_tx
6692 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
6693 .ok();
6694 }
6695 Ok(())
6696 },
6697 );
6698 }
6699
6700 fn spawn_local_git_worker(
6701 state: Shared<Task<Result<LocalRepositoryState, String>>>,
6702 cx: &mut Context<Self>,
6703 ) -> mpsc::UnboundedSender<GitJob> {
6704 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
6705
6706 cx.spawn(async move |_, cx| {
6707 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
6708 if let Some(git_hosting_provider_registry) =
6709 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
6710 {
6711 git_hosting_providers::register_additional_providers(
6712 git_hosting_provider_registry,
6713 state.backend.clone(),
6714 )
6715 .await;
6716 }
6717 let state = RepositoryState::Local(state);
6718 let mut jobs = VecDeque::new();
6719 loop {
6720 while let Ok(next_job) = job_rx.try_recv() {
6721 jobs.push_back(next_job);
6722 }
6723
6724 if let Some(job) = jobs.pop_front() {
6725 if let Some(current_key) = &job.key
6726 && jobs
6727 .iter()
6728 .any(|other_job| other_job.key.as_ref() == Some(current_key))
6729 {
6730 continue;
6731 }
6732 (job.job)(state.clone(), cx).await;
6733 } else if let Some(job) = job_rx.next().await {
6734 jobs.push_back(job);
6735 } else {
6736 break;
6737 }
6738 }
6739 anyhow::Ok(())
6740 })
6741 .detach_and_log_err(cx);
6742
6743 job_tx
6744 }
6745
6746 fn spawn_remote_git_worker(
6747 state: RemoteRepositoryState,
6748 cx: &mut Context<Self>,
6749 ) -> mpsc::UnboundedSender<GitJob> {
6750 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
6751
6752 cx.spawn(async move |_, cx| {
6753 let state = RepositoryState::Remote(state);
6754 let mut jobs = VecDeque::new();
6755 loop {
6756 while let Ok(next_job) = job_rx.try_recv() {
6757 jobs.push_back(next_job);
6758 }
6759
6760 if let Some(job) = jobs.pop_front() {
6761 if let Some(current_key) = &job.key
6762 && jobs
6763 .iter()
6764 .any(|other_job| other_job.key.as_ref() == Some(current_key))
6765 {
6766 continue;
6767 }
6768 (job.job)(state.clone(), cx).await;
6769 } else if let Some(job) = job_rx.next().await {
6770 jobs.push_back(job);
6771 } else {
6772 break;
6773 }
6774 }
6775 anyhow::Ok(())
6776 })
6777 .detach_and_log_err(cx);
6778
6779 job_tx
6780 }
6781
6782 fn load_staged_text(
6783 &mut self,
6784 buffer_id: BufferId,
6785 repo_path: RepoPath,
6786 cx: &App,
6787 ) -> Task<Result<Option<String>>> {
6788 let rx = self.send_job(None, move |state, _| async move {
6789 match state {
6790 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6791 anyhow::Ok(backend.load_index_text(repo_path).await)
6792 }
6793 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6794 let response = client
6795 .request(proto::OpenUnstagedDiff {
6796 project_id: project_id.to_proto(),
6797 buffer_id: buffer_id.to_proto(),
6798 })
6799 .await?;
6800 Ok(response.staged_text)
6801 }
6802 }
6803 });
6804 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6805 }
6806
6807 fn load_committed_text(
6808 &mut self,
6809 buffer_id: BufferId,
6810 repo_path: RepoPath,
6811 cx: &App,
6812 ) -> Task<Result<DiffBasesChange>> {
6813 let rx = self.send_job(None, move |state, _| async move {
6814 match state {
6815 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6816 let committed_text = backend.load_committed_text(repo_path.clone()).await;
6817 let staged_text = backend.load_index_text(repo_path).await;
6818 let diff_bases_change = if committed_text == staged_text {
6819 DiffBasesChange::SetBoth(committed_text)
6820 } else {
6821 DiffBasesChange::SetEach {
6822 index: staged_text,
6823 head: committed_text,
6824 }
6825 };
6826 anyhow::Ok(diff_bases_change)
6827 }
6828 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6829 use proto::open_uncommitted_diff_response::Mode;
6830
6831 let response = client
6832 .request(proto::OpenUncommittedDiff {
6833 project_id: project_id.to_proto(),
6834 buffer_id: buffer_id.to_proto(),
6835 })
6836 .await?;
6837 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
6838 let bases = match mode {
6839 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
6840 Mode::IndexAndHead => DiffBasesChange::SetEach {
6841 head: response.committed_text,
6842 index: response.staged_text,
6843 },
6844 };
6845 Ok(bases)
6846 }
6847 }
6848 });
6849
6850 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6851 }
6852
6853 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6854 let repository_id = self.snapshot.id;
6855 let rx = self.send_job(None, move |state, _| async move {
6856 match state {
6857 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6858 backend.load_blob_content(oid).await
6859 }
6860 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6861 let response = client
6862 .request(proto::GetBlobContent {
6863 project_id: project_id.to_proto(),
6864 repository_id: repository_id.0,
6865 oid: oid.to_string(),
6866 })
6867 .await?;
6868 Ok(response.content)
6869 }
6870 }
6871 });
6872 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6873 }
6874
    /// Schedules an incremental status refresh for `paths`, batching them with
    /// any paths already awaiting an update. Fresh statuses are diffed against
    /// the previous snapshot so only real changes produce edits and events.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        // Keyed job: if another refresh gets queued behind this one, this one
        // is skipped in its favor (the newer run sees the accumulated paths).
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of every accumulated batch of paths at once.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                // With no HEAD commit there is nothing to compute diff stats against.
                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten and deduplicate the batched paths.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Insert entries whose status or diff stat differs from
                        // the previous snapshot; identical entries are skipped.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths we queried but that came back without a status
                        // are now clean: drop their stale entries.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6987
    /// Currently running git command and when it started, if any job is active.
    // NOTE(review): `active_jobs` is a map, so with several concurrent jobs the
    // one returned is arbitrary — confirm callers only need "some" active job.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }
6992
    /// Enqueues a no-op job and returns a receiver that resolves once every
    /// job enqueued before it has been processed by the worker loop.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
6996
    /// Runs `f` while tracking a pending op (with the given git status) for
    /// each of `paths`, so observers can display per-path in-flight state.
    /// When `f` resolves, each op's job status is updated to reflect the
    /// outcome.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A `Canceled` error means the job never ran (its channel was
            // dropped); record it as skipped rather than failed.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                // Write the final status back into each tracked op, if its path
                // is still present in the pending-ops tree.
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7036
7037 fn new_pending_ops_for_paths(
7038 &mut self,
7039 paths: Vec<RepoPath>,
7040 git_status: pending_op::GitStatus,
7041 ) -> Vec<(PendingOpId, RepoPath)> {
7042 let mut edits = Vec::with_capacity(paths.len());
7043 let mut ids = Vec::with_capacity(paths.len());
7044 for path in paths {
7045 let mut ops = self
7046 .pending_ops
7047 .get(&PathKey(path.as_ref().clone()), ())
7048 .cloned()
7049 .unwrap_or_else(|| PendingOps::new(&path));
7050 let id = ops.max_id() + 1;
7051 ops.ops.push(PendingOp {
7052 id,
7053 git_status,
7054 job_status: pending_op::JobStatus::Running,
7055 });
7056 edits.push(sum_tree::Edit::Insert(ops));
7057 ids.push((id, path));
7058 }
7059 self.pending_ops.edit(edits, ());
7060 ids
7061 }
7062 pub fn default_remote_url(&self) -> Option<String> {
7063 self.remote_upstream_url
7064 .clone()
7065 .or(self.remote_origin_url.clone())
7066 }
7067}
7068
7069/// If `path` is a git linked worktree checkout, resolves it to the main
7070/// repository's working directory path. Returns `None` if `path` is a normal
7071/// repository, not a git repo, or if resolution fails.
7072///
7073/// Resolution works by:
7074/// 1. Reading the `.git` file to get the `gitdir:` pointer
7075/// 2. Following that to the worktree-specific git directory
7076/// 3. Reading the `commondir` file to find the shared `.git` directory
7077/// 4. Deriving the main repo's working directory from the common dir
7078pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7079 let dot_git = path.join(".git");
7080 let metadata = fs.metadata(&dot_git).await.ok()??;
7081 if metadata.is_dir {
7082 return None; // Normal repo, not a linked worktree
7083 }
7084 // It's a .git file — parse the gitdir: pointer
7085 let content = fs.load(&dot_git).await.ok()?;
7086 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7087 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7088 // Read commondir to find the main .git directory
7089 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7090 let common_dir = fs
7091 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7092 .await
7093 .ok()?;
7094 Some(git::repository::original_repo_path_from_common_dir(
7095 &common_dir,
7096 ))
7097}
7098
7099/// Validates that the resolved worktree directory is acceptable:
7100/// - The setting must not be an absolute path.
7101/// - The resolved path must be either a subdirectory of the working
7102/// directory or a subdirectory of its parent (i.e., a sibling).
7103///
7104/// Returns `Ok(resolved_path)` or an error with a user-facing message.
7105pub fn worktrees_directory_for_repo(
7106 original_repo_abs_path: &Path,
7107 worktree_directory_setting: &str,
7108) -> Result<PathBuf> {
7109 // Check the original setting before trimming, since a path like "///"
7110 // is absolute but becomes "" after stripping trailing separators.
7111 // Also check for leading `/` or `\` explicitly, because on Windows
7112 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
7113 // would slip through even though it's clearly not a relative path.
7114 if Path::new(worktree_directory_setting).is_absolute()
7115 || worktree_directory_setting.starts_with('/')
7116 || worktree_directory_setting.starts_with('\\')
7117 {
7118 anyhow::bail!(
7119 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
7120 );
7121 }
7122
7123 if worktree_directory_setting.is_empty() {
7124 anyhow::bail!("git.worktree_directory must not be empty");
7125 }
7126
7127 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
7128 if trimmed == ".." {
7129 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
7130 }
7131
7132 let joined = original_repo_abs_path.join(trimmed);
7133 let resolved = util::normalize_path(&joined);
7134 let resolved = if resolved.starts_with(original_repo_abs_path) {
7135 resolved
7136 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
7137 resolved.join(repo_dir_name)
7138 } else {
7139 resolved
7140 };
7141
7142 let parent = original_repo_abs_path
7143 .parent()
7144 .unwrap_or(original_repo_abs_path);
7145
7146 if !resolved.starts_with(parent) {
7147 anyhow::bail!(
7148 "git.worktree_directory resolved to {resolved:?}, which is outside \
7149 the project root and its parent directory. It must resolve to a \
7150 subdirectory of {original_repo_abs_path:?} or a sibling of it."
7151 );
7152 }
7153
7154 Ok(resolved)
7155}
7156
7157/// Returns a short name for a linked worktree suitable for UI display
7158///
7159/// Uses the main worktree path to come up with a short name that disambiguates
7160/// the linked worktree from the main worktree.
7161pub fn linked_worktree_short_name(
7162 main_worktree_path: &Path,
7163 linked_worktree_path: &Path,
7164) -> Option<SharedString> {
7165 if main_worktree_path == linked_worktree_path {
7166 return None;
7167 }
7168
7169 let project_name = main_worktree_path.file_name()?.to_str()?;
7170 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7171 let name = if directory_name != project_name {
7172 directory_name.to_string()
7173 } else {
7174 linked_worktree_path
7175 .parent()?
7176 .file_name()?
7177 .to_str()?
7178 .to_string()
7179 };
7180 Some(name.into())
7181}
7182
/// Builds a permalink URL to `path` (with `selection` as the line range) for a
/// file inside an extracted crates.io registry source, by combining the
/// crate's published VCS metadata (`.cargo_vcs_info.json`: commit sha and
/// path within the repo) with the `package.repository` URL from `Cargo.toml`.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal deserialization targets: only the fields used below.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to find the crate root: the first ancestor
    // containing a `.cargo_vcs_info.json` (written by `cargo package`).
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file's path at the crate's location inside the original
    // repository. `unwrap` is safe: `dir` is an ancestor of `path` by
    // construction above.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7233
7234fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7235 let Some(blame) = blame else {
7236 return proto::BlameBufferResponse {
7237 blame_response: None,
7238 };
7239 };
7240
7241 let entries = blame
7242 .entries
7243 .into_iter()
7244 .map(|entry| proto::BlameEntry {
7245 sha: entry.sha.as_bytes().into(),
7246 start_line: entry.range.start,
7247 end_line: entry.range.end,
7248 original_line_number: entry.original_line_number,
7249 author: entry.author,
7250 author_mail: entry.author_mail,
7251 author_time: entry.author_time,
7252 author_tz: entry.author_tz,
7253 committer: entry.committer_name,
7254 committer_mail: entry.committer_email,
7255 committer_time: entry.committer_time,
7256 committer_tz: entry.committer_tz,
7257 summary: entry.summary,
7258 previous: entry.previous,
7259 filename: entry.filename,
7260 })
7261 .collect::<Vec<_>>();
7262
7263 let messages = blame
7264 .messages
7265 .into_iter()
7266 .map(|(oid, message)| proto::CommitMessage {
7267 oid: oid.as_bytes().into(),
7268 message,
7269 })
7270 .collect::<Vec<_>>();
7271
7272 proto::BlameBufferResponse {
7273 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7274 }
7275}
7276
7277fn deserialize_blame_buffer_response(
7278 response: proto::BlameBufferResponse,
7279) -> Option<git::blame::Blame> {
7280 let response = response.blame_response?;
7281 let entries = response
7282 .entries
7283 .into_iter()
7284 .filter_map(|entry| {
7285 Some(git::blame::BlameEntry {
7286 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7287 range: entry.start_line..entry.end_line,
7288 original_line_number: entry.original_line_number,
7289 committer_name: entry.committer,
7290 committer_time: entry.committer_time,
7291 committer_tz: entry.committer_tz,
7292 committer_email: entry.committer_mail,
7293 author: entry.author,
7294 author_mail: entry.author_mail,
7295 author_time: entry.author_time,
7296 author_tz: entry.author_tz,
7297 summary: entry.summary,
7298 previous: entry.previous,
7299 filename: entry.filename,
7300 })
7301 })
7302 .collect::<Vec<_>>();
7303
7304 let messages = response
7305 .messages
7306 .into_iter()
7307 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7308 .collect::<HashMap<_, _>>();
7309
7310 Some(Blame { entries, messages })
7311}
7312
/// Serializes a branch — including upstream tracking state and its
/// most-recent-commit summary — into its proto representation.
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        ref_name: branch.ref_name.to_string(),
        // Timestamp of the branch tip's commit, when known.
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            // A `None` status means the upstream ref is gone; ahead/behind
            // counts are only serialized for tracked upstreams.
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string(),
            }),
    }
}
7342
7343fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7344 proto::Worktree {
7345 path: worktree.path.to_string_lossy().to_string(),
7346 ref_name: worktree
7347 .ref_name
7348 .as_ref()
7349 .map(|s| s.to_string())
7350 .unwrap_or_default(),
7351 sha: worktree.sha.to_string(),
7352 is_main: worktree.is_main,
7353 }
7354}
7355
7356fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7357 git::repository::Worktree {
7358 path: PathBuf::from(proto.path.clone()),
7359 ref_name: Some(SharedString::from(&proto.ref_name)),
7360 sha: proto.sha.clone().into(),
7361 is_main: proto.is_main,
7362 }
7363}
7364
/// Deserializes a branch from its proto representation, reconstructing
/// upstream tracking state and the most-recent-commit summary.
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                // Absent tracking info means the upstream ref no longer exists.
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                // NOTE(review): the proto carries no parent information, so
                // every deserialized summary claims a parent — confirm callers
                // tolerate this for root commits.
                has_parent: true,
            }
        }),
    }
}
7396
7397fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7398 proto::GitCommitDetails {
7399 sha: commit.sha.to_string(),
7400 message: commit.message.to_string(),
7401 commit_timestamp: commit.commit_timestamp,
7402 author_email: commit.author_email.to_string(),
7403 author_name: commit.author_name.to_string(),
7404 }
7405}
7406
7407fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7408 CommitDetails {
7409 sha: proto.sha.clone().into(),
7410 message: proto.message.clone().into(),
7411 commit_timestamp: proto.commit_timestamp,
7412 author_email: proto.author_email.clone().into(),
7413 author_name: proto.author_name.clone().into(),
7414 }
7415}
7416
7417/// This snapshot computes the repository state on the foreground thread while
7418/// running the git commands on the background thread. We update branch, head,
7419/// remotes, and worktrees first so the UI can react sooner, then compute file
7420/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Capture identifiers and the previous snapshot up front. Clearing
    // `paths_needing_status_update` records that a full rescan is now in
    // progress, so individually-queued paths need no separate refresh.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD: a repository with no commits has no head SHA, yielding
    // `None`; failures from `show` are logged and discarded (`log_err`),
    // also yielding `None`.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Query branches, the head commit, and the repository's worktrees
    // concurrently on the background executor; any failure aborts the scan.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // Keep only *linked* worktrees: the repository's own working directory
    // is filtered out of the list reported by the backend.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    // Look up the "origin" and "upstream" remote URLs in parallel. Plain
    // `join` (not `try_join`) plus the outer `Ok` means neither lookup can
    // fail the overall computation.
    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First snapshot update: publish branch/head/remote/worktree data before
    // the slower status scan below, emitting change events for anything that
    // differs from the previous snapshot.
    let snapshot = this.update(cx, |this, cx| {
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Gather file statuses (rooted at "." i.e. the whole repository),
    // per-file diff stats, and stash entries in parallel. Diff stats are
    // skipped with an empty result when there is no head commit to diff
    // against.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Join each status entry with its diff stat by path, collecting the
    // conflicted paths along the way for the merge-details refresh below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Refresh merge/conflict details off the main thread; `update` returns
    // whether the set of conflicts changed, which feeds the StatusesChanged
    // event decision below.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Second snapshot update: publish statuses, merge details, and stash
    // entries, emitting events for observed changes. Note that `scan_id` is
    // bumped again here, so one full rescan advances it by two in total.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7586
7587fn status_from_proto(
7588 simple_status: i32,
7589 status: Option<proto::GitFileStatus>,
7590) -> anyhow::Result<FileStatus> {
7591 use proto::git_file_status::Variant;
7592
7593 let Some(variant) = status.and_then(|status| status.variant) else {
7594 let code = proto::GitStatus::from_i32(simple_status)
7595 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7596 let result = match code {
7597 proto::GitStatus::Added => TrackedStatus {
7598 worktree_status: StatusCode::Added,
7599 index_status: StatusCode::Unmodified,
7600 }
7601 .into(),
7602 proto::GitStatus::Modified => TrackedStatus {
7603 worktree_status: StatusCode::Modified,
7604 index_status: StatusCode::Unmodified,
7605 }
7606 .into(),
7607 proto::GitStatus::Conflict => UnmergedStatus {
7608 first_head: UnmergedStatusCode::Updated,
7609 second_head: UnmergedStatusCode::Updated,
7610 }
7611 .into(),
7612 proto::GitStatus::Deleted => TrackedStatus {
7613 worktree_status: StatusCode::Deleted,
7614 index_status: StatusCode::Unmodified,
7615 }
7616 .into(),
7617 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7618 };
7619 return Ok(result);
7620 };
7621
7622 let result = match variant {
7623 Variant::Untracked(_) => FileStatus::Untracked,
7624 Variant::Ignored(_) => FileStatus::Ignored,
7625 Variant::Unmerged(unmerged) => {
7626 let [first_head, second_head] =
7627 [unmerged.first_head, unmerged.second_head].map(|head| {
7628 let code = proto::GitStatus::from_i32(head)
7629 .with_context(|| format!("Invalid git status code: {head}"))?;
7630 let result = match code {
7631 proto::GitStatus::Added => UnmergedStatusCode::Added,
7632 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7633 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7634 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7635 };
7636 Ok(result)
7637 });
7638 let [first_head, second_head] = [first_head?, second_head?];
7639 UnmergedStatus {
7640 first_head,
7641 second_head,
7642 }
7643 .into()
7644 }
7645 Variant::Tracked(tracked) => {
7646 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7647 .map(|status| {
7648 let code = proto::GitStatus::from_i32(status)
7649 .with_context(|| format!("Invalid git status code: {status}"))?;
7650 let result = match code {
7651 proto::GitStatus::Modified => StatusCode::Modified,
7652 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7653 proto::GitStatus::Added => StatusCode::Added,
7654 proto::GitStatus::Deleted => StatusCode::Deleted,
7655 proto::GitStatus::Renamed => StatusCode::Renamed,
7656 proto::GitStatus::Copied => StatusCode::Copied,
7657 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7658 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7659 };
7660 Ok(result)
7661 });
7662 let [index_status, worktree_status] = [index_status?, worktree_status?];
7663 TrackedStatus {
7664 index_status,
7665 worktree_status,
7666 }
7667 .into()
7668 }
7669 };
7670 Ok(result)
7671}
7672
7673fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7674 use proto::git_file_status::{Tracked, Unmerged, Variant};
7675
7676 let variant = match status {
7677 FileStatus::Untracked => Variant::Untracked(Default::default()),
7678 FileStatus::Ignored => Variant::Ignored(Default::default()),
7679 FileStatus::Unmerged(UnmergedStatus {
7680 first_head,
7681 second_head,
7682 }) => Variant::Unmerged(Unmerged {
7683 first_head: unmerged_status_to_proto(first_head),
7684 second_head: unmerged_status_to_proto(second_head),
7685 }),
7686 FileStatus::Tracked(TrackedStatus {
7687 index_status,
7688 worktree_status,
7689 }) => Variant::Tracked(Tracked {
7690 index_status: tracked_status_to_proto(index_status),
7691 worktree_status: tracked_status_to_proto(worktree_status),
7692 }),
7693 };
7694 proto::GitFileStatus {
7695 variant: Some(variant),
7696 }
7697}
7698
7699fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7700 match code {
7701 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7702 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7703 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7704 }
7705}
7706
7707fn tracked_status_to_proto(code: StatusCode) -> i32 {
7708 match code {
7709 StatusCode::Added => proto::GitStatus::Added as _,
7710 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7711 StatusCode::Modified => proto::GitStatus::Modified as _,
7712 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7713 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7714 StatusCode::Copied => proto::GitStatus::Copied as _,
7715 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7716 }
7717}