1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for git state in a project: tracks every repository,
/// per-buffer diff state, and replication to/from collaborators.
pub struct GitStore {
    /// Local (direct filesystem access) vs. remote (proxied over RPC) mode.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All known repositories, keyed by their store-assigned id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// Which worktrees contain each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository currently considered "active", if any.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff loads, shared so concurrent requests join one task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diffs that have been shared with downstream peers, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// The diffs for one buffer that have been shared with a downstream peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: weak handles to the buffer's diffs and conflict
/// set, cached base texts, and the background tasks that keep them fresh.
struct BufferGitState {
    /// Diff of the buffer against the index (staged text).
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    /// Diff of the buffer against HEAD.
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs of the buffer since a given commit (`None` key = empty base).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Listeners to notify once the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    /// Broadcasts whether a diff recalculation is currently in flight.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    /// Cached base texts for commit diffs, keyed by commit oid.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Which diff base texts changed, and their new contents.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed, each to its own value.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// One value applies to both index and HEAD.
    SetBoth(Option<String>),
}

/// Identifies which kind of diff is being loaded for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer vs. index.
    Unstaged,
    /// Buffer vs. HEAD.
    Uncommitted,
    /// Buffer vs. a specific commit (`None` = empty base).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store runs against the local filesystem or proxies to an
/// upstream collaborator — plus any downstream share in either case.
enum GitStoreState {
    Local {
        /// Source of fresh repository ids for locally discovered repos.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while re-sharing this remote project downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A repository change queued for replication to downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Share state for a local store: repository updates are queued on
/// `updates_tx` and forwarded to `client` by `_task`.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// A point-in-time checkpoint of every repository in the store, keyed by
/// each repository's working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}

/// The git status of a single path within a repository.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    /// Added/deleted line counts, when a diff stat has been computed.
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summary for the status tree: the path as the ordering bound plus the
    /// rolled-up git status.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed (and deduplicated) by repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Store-assigned identifier for a repository.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: the merge heads recorded per conflicted
/// path, and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Lazily fetched per-commit data for the commit graph.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// Immutable snapshot of everything known about a repository: per-path
/// statuses, branch info, merge state, remotes, stash, and linked worktrees.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Git status for every known path, ordered for efficient path queries.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    // NOTE(review): appears to be a monotonic scan counter for detecting
    // stale snapshots — confirm against the scanner.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    pub linked_worktrees: Arc<[GitWorktree]>,
}
299
/// Monotonic identifier for a queued git job.
type JobId = u64;

/// Progress information about a running git job.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Background task that services commit-data requests for the git graph.
struct GraphCommitDataHandler {
    _task: Task<()>,
    /// Channel for requesting data about individual commits by oid.
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph commit-data handler task.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Commit-graph data for one (source, order) combination, populated
/// incrementally by `fetch_task`.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Reverse index from commit oid to its position in `commit_data`.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of the current commit-graph data.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}

/// How to position HEAD when creating a new git worktree.
#[derive(Clone, Debug)]
enum CreateWorktreeStartPoint {
    Detached,
    Branched { name: String },
}
337
/// A single git repository tracked by the store. Read access mostly goes
/// through the `Deref` impl to the underlying `RepositorySnapshot`; git
/// operations are serialized through `job_sender`.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue of git jobs to run against this repository, in order.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Currently running jobs, keyed by id, for progress display.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    /// Next job id to hand out.
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Lazily resolved backend (local repo handle or remote RPC proxy).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}

impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
365
/// Backend state for a repository we can access directly on disk.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    /// Shell environment captured from the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
372
impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`, first capturing the
    /// working directory's shell environment so git subprocesses see the
    /// same `PATH` (and other variables) a user shell would.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                // Fall back to an empty environment rather than failing to
                // open the repository at all.
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` binary found via the captured PATH;
                    // fall back to the process's own PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
415
/// Backend state for a repository accessed via an upstream collaborator.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// A repository's backend: direct filesystem access or an RPC proxy.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events while loading the commit graph.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}

/// Notifications emitted by a `Repository` when parts of its snapshot change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Notifications emitted by the `GitStore` itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of git work to run once the repository's backend is available.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): presumably used to coalesce redundant queued jobs with
    // the same key — confirm in the job-queue handling.
    key: Option<GitJobKey>,
}

/// Keys identifying classes of git jobs (see `GitJob::key`).
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
477
478impl GitStore {
479 pub fn local(
480 worktree_store: &Entity<WorktreeStore>,
481 buffer_store: Entity<BufferStore>,
482 environment: Entity<ProjectEnvironment>,
483 fs: Arc<dyn Fs>,
484 cx: &mut Context<Self>,
485 ) -> Self {
486 Self::new(
487 worktree_store.clone(),
488 buffer_store,
489 GitStoreState::Local {
490 next_repository_id: Arc::new(AtomicU64::new(1)),
491 downstream: None,
492 project_environment: environment,
493 fs,
494 },
495 cx,
496 )
497 }
498
499 pub fn remote(
500 worktree_store: &Entity<WorktreeStore>,
501 buffer_store: Entity<BufferStore>,
502 upstream_client: AnyProtoClient,
503 project_id: u64,
504 cx: &mut Context<Self>,
505 ) -> Self {
506 Self::new(
507 worktree_store.clone(),
508 buffer_store,
509 GitStoreState::Remote {
510 upstream_client,
511 upstream_project_id: project_id,
512 downstream: None,
513 },
514 cx,
515 )
516 }
517
518 fn new(
519 worktree_store: Entity<WorktreeStore>,
520 buffer_store: Entity<BufferStore>,
521 state: GitStoreState,
522 cx: &mut Context<Self>,
523 ) -> Self {
524 let mut _subscriptions = vec![
525 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
526 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
527 ];
528
529 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
530 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
531 }
532
533 GitStore {
534 state,
535 buffer_store,
536 worktree_store,
537 repositories: HashMap::default(),
538 worktree_ids: HashMap::default(),
539 active_repo_id: None,
540 _subscriptions,
541 loading_diffs: HashMap::default(),
542 shared_diffs: HashMap::default(),
543 diffs: HashMap::default(),
544 }
545 }
546
    /// Registers every git-related RPC handler on the given client so that
    /// remote peers can drive this store's operations.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
    }
599
600 pub fn is_local(&self) -> bool {
601 matches!(self.state, GitStoreState::Local { .. })
602 }
603 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
604 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
605 let id = repo.read(cx).id;
606 if self.active_repo_id != Some(id) {
607 self.active_repo_id = Some(id);
608 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
609 }
610 }
611 }
612
    /// Begins sharing this store's repositories with a downstream peer.
    ///
    /// Remote stores forward each repository's initial snapshot immediately.
    /// Local stores additionally spawn a background task that diffs each
    /// queued snapshot against the last one sent and streams incremental
    /// updates to the client; the task clears the downstream state when the
    /// update channel closes.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send a full initial snapshot of every repository.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the queue with every current snapshot so the peer
                // receives full initial state.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        // Known repository: send only the delta
                                        // against the last snapshot we sent.
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send its full state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // Channel closed (or send failed): tear down the share.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
693
694 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
695 match &mut self.state {
696 GitStoreState::Local {
697 downstream: downstream_client,
698 ..
699 } => {
700 downstream_client.take();
701 }
702 GitStoreState::Remote {
703 downstream: downstream_client,
704 ..
705 } => {
706 downstream_client.take();
707 }
708 }
709 self.shared_diffs.clear();
710 }
711
    /// Drops all diff-sharing bookkeeping for a disconnected peer.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
715
716 pub fn active_repository(&self) -> Option<Entity<Repository>> {
717 self.active_repo_id
718 .as_ref()
719 .map(|id| self.repositories[id].clone())
720 }
721
722 pub fn open_unstaged_diff(
723 &mut self,
724 buffer: Entity<Buffer>,
725 cx: &mut Context<Self>,
726 ) -> Task<Result<Entity<BufferDiff>>> {
727 let buffer_id = buffer.read(cx).remote_id();
728 if let Some(diff_state) = self.diffs.get(&buffer_id)
729 && let Some(unstaged_diff) = diff_state
730 .read(cx)
731 .unstaged_diff
732 .as_ref()
733 .and_then(|weak| weak.upgrade())
734 {
735 if let Some(task) =
736 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
737 {
738 return cx.background_executor().spawn(async move {
739 task.await;
740 Ok(unstaged_diff)
741 });
742 }
743 return Task::ready(Ok(unstaged_diff));
744 }
745
746 let Some((repo, repo_path)) =
747 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
748 else {
749 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
750 };
751
752 let task = self
753 .loading_diffs
754 .entry((buffer_id, DiffKind::Unstaged))
755 .or_insert_with(|| {
756 let staged_text = repo.update(cx, |repo, cx| {
757 repo.load_staged_text(buffer_id, repo_path, cx)
758 });
759 cx.spawn(async move |this, cx| {
760 Self::open_diff_internal(
761 this,
762 DiffKind::Unstaged,
763 staged_text.await.map(DiffBasesChange::SetIndex),
764 buffer,
765 cx,
766 )
767 .await
768 .map_err(Arc::new)
769 })
770 .shared()
771 })
772 .clone();
773
774 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
775 }
776
    /// Returns a diff of `buffer` against the blob it had at commit `oid`
    /// (`None` means an empty base), creating and caching the diff if
    /// needed. The unstaged diff is attached as the secondary diff so
    /// staged/unstaged state renders correctly.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: diff already exists — wait for any in-flight
        // recalculation, then return it.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Join an in-flight load for the same (buffer, oid) if one exists.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository; `None` oid
                    // means diff against an empty base.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Attach the unstaged diff as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Register the finished diff in the buffer's git state
                    // and clear the loading entry.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
877
878 #[ztracing::instrument(skip_all)]
879 pub fn open_uncommitted_diff(
880 &mut self,
881 buffer: Entity<Buffer>,
882 cx: &mut Context<Self>,
883 ) -> Task<Result<Entity<BufferDiff>>> {
884 let buffer_id = buffer.read(cx).remote_id();
885
886 if let Some(diff_state) = self.diffs.get(&buffer_id)
887 && let Some(uncommitted_diff) = diff_state
888 .read(cx)
889 .uncommitted_diff
890 .as_ref()
891 .and_then(|weak| weak.upgrade())
892 {
893 if let Some(task) =
894 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
895 {
896 return cx.background_executor().spawn(async move {
897 task.await;
898 Ok(uncommitted_diff)
899 });
900 }
901 return Task::ready(Ok(uncommitted_diff));
902 }
903
904 let Some((repo, repo_path)) =
905 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
906 else {
907 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
908 };
909
910 let task = self
911 .loading_diffs
912 .entry((buffer_id, DiffKind::Uncommitted))
913 .or_insert_with(|| {
914 let changes = repo.update(cx, |repo, cx| {
915 repo.load_committed_text(buffer_id, repo_path, cx)
916 });
917
918 // todo(lw): hot foreground spawn
919 cx.spawn(async move |this, cx| {
920 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
921 .await
922 .map_err(Arc::new)
923 })
924 .shared()
925 })
926 .clone();
927
928 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
929 }
930
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`:
    /// records the loaded base text(s) in the buffer's git state, creates
    /// the `BufferDiff` entity, and waits for its first recalculation
    /// before handing it back.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Loading the base text failed: drop the loading entry so a
                // later request can retry, then propagate the error.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Apply the new base text(s) and wait for the resulting
                // recalculation before resolving.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1005
1006 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1007 let diff_state = self.diffs.get(&buffer_id)?;
1008 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1009 }
1010
1011 pub fn get_uncommitted_diff(
1012 &self,
1013 buffer_id: BufferId,
1014 cx: &App,
1015 ) -> Option<Entity<BufferDiff>> {
1016 let diff_state = self.diffs.get(&buffer_id)?;
1017 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1018 }
1019
1020 pub fn get_diff_since_oid(
1021 &self,
1022 buffer_id: BufferId,
1023 oid: Option<git::Oid>,
1024 cx: &App,
1025 ) -> Option<Entity<BufferDiff>> {
1026 let diff_state = self.diffs.get(&buffer_id)?;
1027 diff_state.read(cx).oid_diff(oid)
1028 }
1029
1030 pub fn open_conflict_set(
1031 &mut self,
1032 buffer: Entity<Buffer>,
1033 cx: &mut Context<Self>,
1034 ) -> Entity<ConflictSet> {
1035 log::debug!("open conflict set");
1036 let buffer_id = buffer.read(cx).remote_id();
1037
1038 if let Some(git_state) = self.diffs.get(&buffer_id)
1039 && let Some(conflict_set) = git_state
1040 .read(cx)
1041 .conflict_set
1042 .as_ref()
1043 .and_then(|weak| weak.upgrade())
1044 {
1045 let conflict_set = conflict_set;
1046 let buffer_snapshot = buffer.read(cx).text_snapshot();
1047
1048 git_state.update(cx, |state, cx| {
1049 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1050 });
1051
1052 return conflict_set;
1053 }
1054
1055 let is_unmerged = self
1056 .repository_and_path_for_buffer_id(buffer_id, cx)
1057 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1058 let git_store = cx.weak_entity();
1059 let buffer_git_state = self
1060 .diffs
1061 .entry(buffer_id)
1062 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1063 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1064
1065 self._subscriptions
1066 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1067 cx.emit(GitStoreEvent::ConflictsUpdated);
1068 }));
1069
1070 buffer_git_state.update(cx, |state, cx| {
1071 state.conflict_set = Some(conflict_set.downgrade());
1072 let buffer_snapshot = buffer.read(cx).text_snapshot();
1073 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1074 });
1075
1076 conflict_set
1077 }
1078
1079 pub fn project_path_git_status(
1080 &self,
1081 project_path: &ProjectPath,
1082 cx: &App,
1083 ) -> Option<FileStatus> {
1084 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1085 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1086 }
1087
1088 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
1089 let mut work_directory_abs_paths = Vec::new();
1090 let mut checkpoints = Vec::new();
1091 for repository in self.repositories.values() {
1092 repository.update(cx, |repository, _| {
1093 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
1094 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
1095 });
1096 }
1097
1098 cx.background_executor().spawn(async move {
1099 let checkpoints = future::try_join_all(checkpoints).await?;
1100 Ok(GitStoreCheckpoint {
1101 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
1102 .into_iter()
1103 .zip(checkpoints)
1104 .collect(),
1105 })
1106 })
1107 }
1108
1109 pub fn restore_checkpoint(
1110 &self,
1111 checkpoint: GitStoreCheckpoint,
1112 cx: &mut App,
1113 ) -> Task<Result<()>> {
1114 let repositories_by_work_dir_abs_path = self
1115 .repositories
1116 .values()
1117 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1118 .collect::<HashMap<_, _>>();
1119
1120 let mut tasks = Vec::new();
1121 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1122 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1123 let restore = repository.update(cx, |repository, _| {
1124 repository.restore_checkpoint(checkpoint)
1125 });
1126 tasks.push(async move { restore.await? });
1127 }
1128 }
1129 cx.background_spawn(async move {
1130 future::try_join_all(tasks).await?;
1131 Ok(())
1132 })
1133 }
1134
1135 /// Compares two checkpoints, returning true if they are equal.
1136 pub fn compare_checkpoints(
1137 &self,
1138 left: GitStoreCheckpoint,
1139 mut right: GitStoreCheckpoint,
1140 cx: &mut App,
1141 ) -> Task<Result<bool>> {
1142 let repositories_by_work_dir_abs_path = self
1143 .repositories
1144 .values()
1145 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1146 .collect::<HashMap<_, _>>();
1147
1148 let mut tasks = Vec::new();
1149 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1150 if let Some(right_checkpoint) = right
1151 .checkpoints_by_work_dir_abs_path
1152 .remove(&work_dir_abs_path)
1153 {
1154 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1155 {
1156 let compare = repository.update(cx, |repository, _| {
1157 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1158 });
1159
1160 tasks.push(async move { compare.await? });
1161 }
1162 } else {
1163 return Task::ready(Ok(false));
1164 }
1165 }
1166 cx.background_spawn(async move {
1167 Ok(future::try_join_all(tasks)
1168 .await?
1169 .into_iter()
1170 .all(|result| result))
1171 })
1172 }
1173
    /// Blames a buffer.
    ///
    /// When `version` is provided, blames the buffer content at that version;
    /// otherwise the current content is used. Local repositories compute the
    /// blame through the git backend; remote repositories forward the request
    /// upstream and deserialize the response. Returns an error if the buffer
    /// does not belong to any repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the content to blame before going async, so later edits
        // don't affect the result.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle across the await so the repository can be
        // released while the blame is in flight.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // The serialized buffer version lets the remote side blame
                    // the same content snapshot we captured above.
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1220
1221 pub fn file_history(
1222 &self,
1223 repo: &Entity<Repository>,
1224 path: RepoPath,
1225 cx: &mut App,
1226 ) -> Task<Result<git::repository::FileHistory>> {
1227 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1228
1229 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1230 }
1231
1232 pub fn file_history_paginated(
1233 &self,
1234 repo: &Entity<Repository>,
1235 path: RepoPath,
1236 skip: usize,
1237 limit: Option<usize>,
1238 cx: &mut App,
1239 ) -> Task<Result<git::repository::FileHistory>> {
1240 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1241
1242 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1243 }
1244
    /// Builds a permalink URL to the given line range of `buffer` on its git
    /// hosting provider.
    ///
    /// Uses the branch's upstream remote (falling back to `"origin"`) and the
    /// current HEAD SHA. For buffers outside any repository, falls back to a
    /// Cargo-registry permalink for Rust sources; everything else errors.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the upstream's remote name; default to "origin" when the
        // branch has no upstream configured.
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        // Resolve the permalink on the repository's job queue: locally by
        // querying the backend, remotely by asking the upstream host.
        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1329
1330 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1331 match &self.state {
1332 GitStoreState::Local {
1333 downstream: downstream_client,
1334 ..
1335 } => downstream_client
1336 .as_ref()
1337 .map(|state| (state.client.clone(), state.project_id)),
1338 GitStoreState::Remote {
1339 downstream: downstream_client,
1340 ..
1341 } => downstream_client.clone(),
1342 }
1343 }
1344
1345 fn upstream_client(&self) -> Option<AnyProtoClient> {
1346 match &self.state {
1347 GitStoreState::Local { .. } => None,
1348 GitStoreState::Remote {
1349 upstream_client, ..
1350 } => Some(upstream_client.clone()),
1351 }
1352 }
1353
    /// Reacts to worktree-store events on local stores: routes changed paths
    /// to their repositories, rescans repositories when worktree git state
    /// changes, and drops repositories whose last containing worktree was
    /// removed. No-op for remote stores, which learn about repositories via
    /// RPC instead.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by owning repository, then
                    // notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees don't contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Collect repositories that are no longer covered by any
                // worktree after removing this one from their sets.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell any downstream peer to forget the repository too.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // Fall back to any remaining repository (or none) when the
                // active one was just removed.
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Responds to a repository's update event: refreshes the conflict state
    /// of every tracked buffer that belongs to this repository, then
    /// re-broadcasts the event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Sync the conflict flag with the repository's view of
                        // this path; the `?` bails out if the weak conflict
                        // set has already been dropped.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Reparse markers only when the flag actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1499
    /// Re-emits a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1503
    /// Updates our list of repositories and schedules git scans in response
    /// to a notification from a worktree.
    ///
    /// Existing repositories matching an update (by old or new work
    /// directory) are either re-pointed and rescanned, or marked for removal
    /// once no worktree contains them. Updates describing a fully-specified
    /// new repository create a fresh local `Repository` entity.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match against either the old or new work directory so renames
            // are recognized as the same repository.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still exists: adopt the (possibly new) work
                    // directory and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Repository is gone from this worktree; remove it
                    // entirely once no other worktree references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // Brand-new repository: resolve the original (main) repo path
                // for linked worktrees, allocate an id, and spin up the entity.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // First repository added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1616
1617 fn on_trusted_worktrees_event(
1618 &mut self,
1619 _: Entity<TrustedWorktreesStore>,
1620 event: &TrustedWorktreesEvent,
1621 cx: &mut Context<Self>,
1622 ) {
1623 if !matches!(self.state, GitStoreState::Local { .. }) {
1624 return;
1625 }
1626
1627 let (is_trusted, event_paths) = match event {
1628 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1629 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1630 };
1631
1632 for (repo_id, worktree_ids) in &self.worktree_ids {
1633 if worktree_ids
1634 .iter()
1635 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1636 {
1637 if let Some(repo) = self.repositories.get(repo_id) {
1638 let repository_state = repo.read(cx).repository_state.clone();
1639 cx.background_spawn(async move {
1640 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1641 state.backend.set_trusted(is_trusted);
1642 }
1643 })
1644 .detach();
1645 }
1646 }
1647 }
1648 }
1649
    /// Keeps per-buffer git state in sync with the buffer store: watches new
    /// buffers for language changes, drops diff state for closed buffers, and
    /// reloads committed text when a buffer's path changes.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Diff highlighting depends on the buffer's language, so
                // re-sync the diff state whenever it changes.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop both our own diff state and any diffs shared with peers.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Reload the committed text for the new path, then rebase
                    // the diff on it; failures are logged, not surfaced.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1722
1723 pub fn recalculate_buffer_diffs(
1724 &mut self,
1725 buffers: Vec<Entity<Buffer>>,
1726 cx: &mut Context<Self>,
1727 ) -> impl Future<Output = ()> + use<> {
1728 let mut futures = Vec::new();
1729 for buffer in buffers {
1730 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1731 let buffer = buffer.read(cx).text_snapshot();
1732 diff_state.update(cx, |diff_state, cx| {
1733 diff_state.recalculate_diffs(buffer.clone(), cx);
1734 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1735 });
1736 futures.push(diff_state.update(cx, |diff_state, cx| {
1737 diff_state
1738 .reparse_conflict_markers(buffer, cx)
1739 .map(|_| {})
1740 .boxed()
1741 }));
1742 }
1743 }
1744 async move {
1745 futures::future::join_all(futures).await;
1746 }
1747 }
1748
    /// Writes staged/unstaged hunk changes from a buffer diff back to the git
    /// index, rolling back pending hunks and emitting an error event if the
    /// index write fails.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the operation counter so stale index writes can be
                // distinguished from the latest one.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Only a completed-but-failed job (Ok(Err(_))) is an
                        // index write error worth reporting.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1788
1789 fn local_worktree_git_repos_changed(
1790 &mut self,
1791 worktree: Entity<Worktree>,
1792 changed_repos: &UpdatedGitRepositoriesSet,
1793 cx: &mut Context<Self>,
1794 ) {
1795 log::debug!("local worktree repos changed");
1796 debug_assert!(worktree.read(cx).is_local());
1797
1798 for repository in self.repositories.values() {
1799 repository.update(cx, |repository, cx| {
1800 let repo_abs_path = &repository.work_directory_abs_path;
1801 if changed_repos.iter().any(|update| {
1802 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1803 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1804 }) {
1805 repository.reload_buffer_diff_bases(cx);
1806 }
1807 });
1808 }
1809 }
1810
    /// All repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1814
1815 /// Returns the original (main) repository working directory for the given worktree.
1816 /// For normal checkouts this equals the worktree's own path; for linked
1817 /// worktrees it points back to the original repo.
1818 pub fn original_repo_path_for_worktree(
1819 &self,
1820 worktree_id: WorktreeId,
1821 cx: &App,
1822 ) -> Option<Arc<Path>> {
1823 self.active_repo_id
1824 .iter()
1825 .chain(self.worktree_ids.keys())
1826 .find(|repo_id| {
1827 self.worktree_ids
1828 .get(repo_id)
1829 .is_some_and(|ids| ids.contains(&worktree_id))
1830 })
1831 .and_then(|repo_id| self.repositories.get(repo_id))
1832 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1833 }
1834
1835 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1836 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1837 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1838 Some(status.status)
1839 }
1840
1841 pub fn repository_and_path_for_buffer_id(
1842 &self,
1843 buffer_id: BufferId,
1844 cx: &App,
1845 ) -> Option<(Entity<Repository>, RepoPath)> {
1846 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1847 let project_path = buffer.read(cx).project_path(cx)?;
1848 self.repository_and_path_for_project_path(&project_path, cx)
1849 }
1850
1851 pub fn repository_and_path_for_project_path(
1852 &self,
1853 path: &ProjectPath,
1854 cx: &App,
1855 ) -> Option<(Entity<Repository>, RepoPath)> {
1856 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1857 self.repositories
1858 .values()
1859 .filter_map(|repo| {
1860 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1861 Some((repo.clone(), repo_path))
1862 })
1863 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1864 }
1865
1866 pub fn git_init(
1867 &self,
1868 path: Arc<Path>,
1869 fallback_branch_name: String,
1870 cx: &App,
1871 ) -> Task<Result<()>> {
1872 match &self.state {
1873 GitStoreState::Local { fs, .. } => {
1874 let fs = fs.clone();
1875 cx.background_executor()
1876 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1877 }
1878 GitStoreState::Remote {
1879 upstream_client,
1880 upstream_project_id: project_id,
1881 ..
1882 } => {
1883 let client = upstream_client.clone();
1884 let project_id = *project_id;
1885 cx.background_executor().spawn(async move {
1886 client
1887 .request(proto::GitInit {
1888 project_id: project_id,
1889 abs_path: path.to_string_lossy().into_owned(),
1890 fallback_branch_name,
1891 })
1892 .await?;
1893 Ok(())
1894 })
1895 }
1896 }
1897 }
1898
1899 pub fn git_clone(
1900 &self,
1901 repo: String,
1902 path: impl Into<Arc<std::path::Path>>,
1903 cx: &App,
1904 ) -> Task<Result<()>> {
1905 let path = path.into();
1906 match &self.state {
1907 GitStoreState::Local { fs, .. } => {
1908 let fs = fs.clone();
1909 cx.background_executor()
1910 .spawn(async move { fs.git_clone(&repo, &path).await })
1911 }
1912 GitStoreState::Remote {
1913 upstream_client,
1914 upstream_project_id,
1915 ..
1916 } => {
1917 if upstream_client.is_via_collab() {
1918 return Task::ready(Err(anyhow!(
1919 "Git Clone isn't supported for project guests"
1920 )));
1921 }
1922 let request = upstream_client.request(proto::GitClone {
1923 project_id: *upstream_project_id,
1924 abs_path: path.to_string_lossy().into_owned(),
1925 remote_repo: repo,
1926 });
1927
1928 cx.background_spawn(async move {
1929 let result = request.await?;
1930
1931 match result.success {
1932 true => Ok(()),
1933 false => Err(anyhow!("Git Clone failed")),
1934 }
1935 })
1936 }
1937 }
1938 }
1939
    /// Handles an `UpdateRepository` RPC from upstream: creates the remote
    /// repository entity on first sight, applies the update, and re-forwards
    /// it to any downstream peer.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Created lazily the first time this repository id is seen.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // First repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Relay the update downstream under our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1995
    /// Handles a `RemoveRepository` RPC from upstream: drops the repository,
    /// relays the removal downstream, and clears the active repository if it
    /// was the one removed.
    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            // Relay the removal downstream under our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        });
        Ok(())
    }
2017
    /// Handles a `GitInit` RPC by initializing a repository at the requested
    /// path on behalf of the remote peer.
    async fn handle_git_init(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitInit>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let name = envelope.payload.fallback_branch_name;
        cx.update(|cx| this.read(cx).git_init(path, name, cx))
            .await?;

        Ok(proto::Ack {})
    }
2030
    /// Handles a `GitClone` RPC by cloning on behalf of the remote peer.
    /// Failure is reported via the response's `success` flag rather than as
    /// an RPC error.
    async fn handle_git_clone(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitClone>,
        cx: AsyncApp,
    ) -> Result<proto::GitCloneResponse> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let repo_name = envelope.payload.remote_repo;
        let result = cx
            .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
            .await;

        Ok(proto::GitCloneResponse {
            success: result.is_ok(),
        })
    }
2046
    /// Handles a `Fetch` RPC by running the fetch on the requested repository
    /// and relaying git's stdout/stderr back to the caller.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Route credential prompts back to the requesting peer.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2076
    /// Handles a `Push` RPC by pushing the requested branch to the requested
    /// remote and relaying git's stdout/stderr back to the caller.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Route credential prompts back to the requesting peer.
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // Only honor `options` when the field was explicitly set; the
        // `map(|_| ...)` gates on presence because `payload.options()` would
        // otherwise yield the enum's default.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2124
    /// Handles a remote pull request, forwarding credential prompts through an
    /// askpass delegate and returning git's stdout/stderr.
    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `branch_name` is optional on the wire; `rebase` selects pull --rebase
        // behavior downstream.
        let branch_name = envelope.payload.branch_name.map(|name| name.into());
        let remote_name = envelope.payload.remote_name.into();
        let rebase = envelope.payload.rebase;

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }
2156
2157 async fn handle_stage(
2158 this: Entity<Self>,
2159 envelope: TypedEnvelope<proto::Stage>,
2160 mut cx: AsyncApp,
2161 ) -> Result<proto::Ack> {
2162 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2163 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2164
2165 let entries = envelope
2166 .payload
2167 .paths
2168 .into_iter()
2169 .map(|path| RepoPath::new(&path))
2170 .collect::<Result<Vec<_>>>()?;
2171
2172 repository_handle
2173 .update(&mut cx, |repository_handle, cx| {
2174 repository_handle.stage_entries(entries, cx)
2175 })
2176 .await?;
2177 Ok(proto::Ack {})
2178 }
2179
2180 async fn handle_unstage(
2181 this: Entity<Self>,
2182 envelope: TypedEnvelope<proto::Unstage>,
2183 mut cx: AsyncApp,
2184 ) -> Result<proto::Ack> {
2185 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2186 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2187
2188 let entries = envelope
2189 .payload
2190 .paths
2191 .into_iter()
2192 .map(|path| RepoPath::new(&path))
2193 .collect::<Result<Vec<_>>>()?;
2194
2195 repository_handle
2196 .update(&mut cx, |repository_handle, cx| {
2197 repository_handle.unstage_entries(entries, cx)
2198 })
2199 .await?;
2200
2201 Ok(proto::Ack {})
2202 }
2203
2204 async fn handle_stash(
2205 this: Entity<Self>,
2206 envelope: TypedEnvelope<proto::Stash>,
2207 mut cx: AsyncApp,
2208 ) -> Result<proto::Ack> {
2209 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2210 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2211
2212 let entries = envelope
2213 .payload
2214 .paths
2215 .into_iter()
2216 .map(|path| RepoPath::new(&path))
2217 .collect::<Result<Vec<_>>>()?;
2218
2219 repository_handle
2220 .update(&mut cx, |repository_handle, cx| {
2221 repository_handle.stash_entries(entries, cx)
2222 })
2223 .await?;
2224
2225 Ok(proto::Ack {})
2226 }
2227
2228 async fn handle_stash_pop(
2229 this: Entity<Self>,
2230 envelope: TypedEnvelope<proto::StashPop>,
2231 mut cx: AsyncApp,
2232 ) -> Result<proto::Ack> {
2233 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2234 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2235 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2236
2237 repository_handle
2238 .update(&mut cx, |repository_handle, cx| {
2239 repository_handle.stash_pop(stash_index, cx)
2240 })
2241 .await?;
2242
2243 Ok(proto::Ack {})
2244 }
2245
2246 async fn handle_stash_apply(
2247 this: Entity<Self>,
2248 envelope: TypedEnvelope<proto::StashApply>,
2249 mut cx: AsyncApp,
2250 ) -> Result<proto::Ack> {
2251 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2252 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2253 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2254
2255 repository_handle
2256 .update(&mut cx, |repository_handle, cx| {
2257 repository_handle.stash_apply(stash_index, cx)
2258 })
2259 .await?;
2260
2261 Ok(proto::Ack {})
2262 }
2263
    /// Handles a remote request to drop a stash entry, optionally targeting a
    /// specific stash index.
    async fn handle_stash_drop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashDrop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        // NOTE(review): unlike `stash_pop`/`stash_apply` (single `?`), this
        // call yields two error layers (`??`) — presumably a job handle whose
        // delivered result must itself be unwrapped; confirm against
        // `stash_drop`'s return type.
        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_drop(stash_index, cx)
            })
            .await??;

        Ok(proto::Ack {})
    }
2281
2282 async fn handle_set_index_text(
2283 this: Entity<Self>,
2284 envelope: TypedEnvelope<proto::SetIndexText>,
2285 mut cx: AsyncApp,
2286 ) -> Result<proto::Ack> {
2287 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2288 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2289 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2290
2291 repository_handle
2292 .update(&mut cx, |repository_handle, cx| {
2293 repository_handle.spawn_set_index_text_job(
2294 repo_path,
2295 envelope.payload.text,
2296 None,
2297 cx,
2298 )
2299 })
2300 .await??;
2301 Ok(proto::Ack {})
2302 }
2303
2304 async fn handle_run_hook(
2305 this: Entity<Self>,
2306 envelope: TypedEnvelope<proto::RunGitHook>,
2307 mut cx: AsyncApp,
2308 ) -> Result<proto::Ack> {
2309 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2310 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2311 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2312 repository_handle
2313 .update(&mut cx, |repository_handle, cx| {
2314 repository_handle.run_hook(hook, cx)
2315 })
2316 .await??;
2317 Ok(proto::Ack {})
2318 }
2319
    /// Handles a remote commit request: builds the message, optional author
    /// override, and commit options from the payload, then runs the commit
    /// with an askpass delegate so any prompt raised by git can be forwarded
    /// to the requesting client.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        // Missing options on the wire fall back to the proto default.
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    // `zip`: the author override is applied only when both a
                    // name and an email were provided.
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                        allow_empty: options.allow_empty,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2359
2360 async fn handle_get_remotes(
2361 this: Entity<Self>,
2362 envelope: TypedEnvelope<proto::GetRemotes>,
2363 mut cx: AsyncApp,
2364 ) -> Result<proto::GetRemotesResponse> {
2365 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2366 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2367
2368 let branch_name = envelope.payload.branch_name;
2369 let is_push = envelope.payload.is_push;
2370
2371 let remotes = repository_handle
2372 .update(&mut cx, |repository_handle, _| {
2373 repository_handle.get_remotes(branch_name, is_push)
2374 })
2375 .await??;
2376
2377 Ok(proto::GetRemotesResponse {
2378 remotes: remotes
2379 .into_iter()
2380 .map(|remotes| proto::get_remotes_response::Remote {
2381 name: remotes.name.to_string(),
2382 })
2383 .collect::<Vec<_>>(),
2384 })
2385 }
2386
2387 async fn handle_get_worktrees(
2388 this: Entity<Self>,
2389 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2390 mut cx: AsyncApp,
2391 ) -> Result<proto::GitWorktreesResponse> {
2392 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2393 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2394
2395 let worktrees = repository_handle
2396 .update(&mut cx, |repository_handle, _| {
2397 repository_handle.worktrees()
2398 })
2399 .await??;
2400
2401 Ok(proto::GitWorktreesResponse {
2402 worktrees: worktrees
2403 .into_iter()
2404 .map(|worktree| worktree_to_proto(&worktree))
2405 .collect::<Vec<_>>(),
2406 })
2407 }
2408
2409 async fn handle_create_worktree(
2410 this: Entity<Self>,
2411 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2412 mut cx: AsyncApp,
2413 ) -> Result<proto::Ack> {
2414 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2415 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2416 let directory = PathBuf::from(envelope.payload.directory);
2417 let start_point = if envelope.payload.name.is_empty() {
2418 CreateWorktreeStartPoint::Detached
2419 } else {
2420 CreateWorktreeStartPoint::Branched {
2421 name: envelope.payload.name,
2422 }
2423 };
2424 let commit = envelope.payload.commit;
2425
2426 repository_handle
2427 .update(&mut cx, |repository_handle, _| {
2428 repository_handle.create_worktree_with_start_point(start_point, directory, commit)
2429 })
2430 .await??;
2431
2432 Ok(proto::Ack {})
2433 }
2434
2435 async fn handle_remove_worktree(
2436 this: Entity<Self>,
2437 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2438 mut cx: AsyncApp,
2439 ) -> Result<proto::Ack> {
2440 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2441 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2442 let path = PathBuf::from(envelope.payload.path);
2443 let force = envelope.payload.force;
2444
2445 repository_handle
2446 .update(&mut cx, |repository_handle, _| {
2447 repository_handle.remove_worktree(path, force)
2448 })
2449 .await??;
2450
2451 Ok(proto::Ack {})
2452 }
2453
2454 async fn handle_rename_worktree(
2455 this: Entity<Self>,
2456 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2457 mut cx: AsyncApp,
2458 ) -> Result<proto::Ack> {
2459 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2460 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2461 let old_path = PathBuf::from(envelope.payload.old_path);
2462 let new_path = PathBuf::from(envelope.payload.new_path);
2463
2464 repository_handle
2465 .update(&mut cx, |repository_handle, _| {
2466 repository_handle.rename_worktree(old_path, new_path)
2467 })
2468 .await??;
2469
2470 Ok(proto::Ack {})
2471 }
2472
2473 async fn handle_get_head_sha(
2474 this: Entity<Self>,
2475 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2476 mut cx: AsyncApp,
2477 ) -> Result<proto::GitGetHeadShaResponse> {
2478 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2479 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2480
2481 let head_sha = repository_handle
2482 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2483 .await??;
2484
2485 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2486 }
2487
2488 async fn handle_get_branches(
2489 this: Entity<Self>,
2490 envelope: TypedEnvelope<proto::GitGetBranches>,
2491 mut cx: AsyncApp,
2492 ) -> Result<proto::GitBranchesResponse> {
2493 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2494 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2495
2496 let branches = repository_handle
2497 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2498 .await??;
2499
2500 Ok(proto::GitBranchesResponse {
2501 branches: branches
2502 .into_iter()
2503 .map(|branch| branch_to_proto(&branch))
2504 .collect::<Vec<_>>(),
2505 })
2506 }
    /// Handles a request for the repository's default branch, if one can be
    /// determined.
    async fn handle_get_default_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDefaultBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetDefaultBranchResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch = repository_handle
            .update(&mut cx, |repository_handle, _| {
                // NOTE(review): the meaning of the `false` flag isn't visible
                // from here — confirm against `default_branch`'s signature.
                repository_handle.default_branch(false)
            })
            .await??
            .map(Into::into);

        Ok(proto::GetDefaultBranchResponse { branch })
    }
2524 async fn handle_create_branch(
2525 this: Entity<Self>,
2526 envelope: TypedEnvelope<proto::GitCreateBranch>,
2527 mut cx: AsyncApp,
2528 ) -> Result<proto::Ack> {
2529 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2530 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2531 let branch_name = envelope.payload.branch_name;
2532
2533 repository_handle
2534 .update(&mut cx, |repository_handle, _| {
2535 repository_handle.create_branch(branch_name, None)
2536 })
2537 .await??;
2538
2539 Ok(proto::Ack {})
2540 }
2541
2542 async fn handle_change_branch(
2543 this: Entity<Self>,
2544 envelope: TypedEnvelope<proto::GitChangeBranch>,
2545 mut cx: AsyncApp,
2546 ) -> Result<proto::Ack> {
2547 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2548 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2549 let branch_name = envelope.payload.branch_name;
2550
2551 repository_handle
2552 .update(&mut cx, |repository_handle, _| {
2553 repository_handle.change_branch(branch_name)
2554 })
2555 .await??;
2556
2557 Ok(proto::Ack {})
2558 }
2559
2560 async fn handle_rename_branch(
2561 this: Entity<Self>,
2562 envelope: TypedEnvelope<proto::GitRenameBranch>,
2563 mut cx: AsyncApp,
2564 ) -> Result<proto::Ack> {
2565 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2566 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2567 let branch = envelope.payload.branch;
2568 let new_name = envelope.payload.new_name;
2569
2570 repository_handle
2571 .update(&mut cx, |repository_handle, _| {
2572 repository_handle.rename_branch(branch, new_name)
2573 })
2574 .await??;
2575
2576 Ok(proto::Ack {})
2577 }
2578
2579 async fn handle_create_remote(
2580 this: Entity<Self>,
2581 envelope: TypedEnvelope<proto::GitCreateRemote>,
2582 mut cx: AsyncApp,
2583 ) -> Result<proto::Ack> {
2584 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2585 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2586 let remote_name = envelope.payload.remote_name;
2587 let remote_url = envelope.payload.remote_url;
2588
2589 repository_handle
2590 .update(&mut cx, |repository_handle, _| {
2591 repository_handle.create_remote(remote_name, remote_url)
2592 })
2593 .await??;
2594
2595 Ok(proto::Ack {})
2596 }
2597
2598 async fn handle_delete_branch(
2599 this: Entity<Self>,
2600 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2601 mut cx: AsyncApp,
2602 ) -> Result<proto::Ack> {
2603 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2604 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2605 let is_remote = envelope.payload.is_remote;
2606 let branch_name = envelope.payload.branch_name;
2607
2608 repository_handle
2609 .update(&mut cx, |repository_handle, _| {
2610 repository_handle.delete_branch(is_remote, branch_name)
2611 })
2612 .await??;
2613
2614 Ok(proto::Ack {})
2615 }
2616
2617 async fn handle_remove_remote(
2618 this: Entity<Self>,
2619 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2620 mut cx: AsyncApp,
2621 ) -> Result<proto::Ack> {
2622 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2623 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2624 let remote_name = envelope.payload.remote_name;
2625
2626 repository_handle
2627 .update(&mut cx, |repository_handle, _| {
2628 repository_handle.remove_remote(remote_name)
2629 })
2630 .await??;
2631
2632 Ok(proto::Ack {})
2633 }
2634
    /// Handles a request for a single commit's details (sha, message, author,
    /// timestamp).
    async fn handle_show(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitShow>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitCommitDetails> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.show(envelope.payload.commit)
            })
            .await??;
        // Flatten the internal commit details into the wire representation.
        Ok(proto::GitCommitDetails {
            sha: commit.sha.into(),
            message: commit.message.into(),
            commit_timestamp: commit.commit_timestamp,
            author_email: commit.author_email.into(),
            author_name: commit.author_name.into(),
        })
    }
2656
2657 async fn handle_create_checkpoint(
2658 this: Entity<Self>,
2659 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2660 mut cx: AsyncApp,
2661 ) -> Result<proto::GitCreateCheckpointResponse> {
2662 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2663 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2664
2665 let checkpoint = repository_handle
2666 .update(&mut cx, |repository, _| repository.checkpoint())
2667 .await??;
2668
2669 Ok(proto::GitCreateCheckpointResponse {
2670 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2671 })
2672 }
2673
2674 async fn handle_restore_checkpoint(
2675 this: Entity<Self>,
2676 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2677 mut cx: AsyncApp,
2678 ) -> Result<proto::Ack> {
2679 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2680 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2681
2682 let checkpoint = GitRepositoryCheckpoint {
2683 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2684 };
2685
2686 repository_handle
2687 .update(&mut cx, |repository, _| {
2688 repository.restore_checkpoint(checkpoint)
2689 })
2690 .await??;
2691
2692 Ok(proto::Ack {})
2693 }
2694
2695 async fn handle_compare_checkpoints(
2696 this: Entity<Self>,
2697 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2698 mut cx: AsyncApp,
2699 ) -> Result<proto::GitCompareCheckpointsResponse> {
2700 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2701 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2702
2703 let left = GitRepositoryCheckpoint {
2704 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2705 };
2706 let right = GitRepositoryCheckpoint {
2707 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2708 };
2709
2710 let equal = repository_handle
2711 .update(&mut cx, |repository, _| {
2712 repository.compare_checkpoints(left, right)
2713 })
2714 .await??;
2715
2716 Ok(proto::GitCompareCheckpointsResponse { equal })
2717 }
2718
2719 async fn handle_diff_checkpoints(
2720 this: Entity<Self>,
2721 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2722 mut cx: AsyncApp,
2723 ) -> Result<proto::GitDiffCheckpointsResponse> {
2724 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2725 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2726
2727 let base = GitRepositoryCheckpoint {
2728 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2729 };
2730 let target = GitRepositoryCheckpoint {
2731 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2732 };
2733
2734 let diff = repository_handle
2735 .update(&mut cx, |repository, _| {
2736 repository.diff_checkpoints(base, target)
2737 })
2738 .await??;
2739
2740 Ok(proto::GitDiffCheckpointsResponse { diff })
2741 }
2742
    /// Handles a request to load the full diff of a commit, returning old and
    /// new text (plus a binary flag) for every changed file.
    async fn handle_load_commit_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::LoadCommitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::LoadCommitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit_diff = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.load_commit_diff(envelope.payload.commit)
            })
            .await??;
        // Convert each changed file into its wire representation.
        Ok(proto::LoadCommitDiffResponse {
            files: commit_diff
                .files
                .into_iter()
                .map(|file| proto::CommitFile {
                    path: file.path.to_proto(),
                    old_text: file.old_text,
                    new_text: file.new_text,
                    is_binary: file.is_binary,
                })
                .collect(),
        })
    }
2769
    /// Handles a paginated file-history request for a single path, using
    /// `skip`/`limit` for pagination, and returns the matching commit entries.
    async fn handle_file_history(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitFileHistory>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitFileHistoryResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let path = RepoPath::from_proto(&envelope.payload.path)?;
        let skip = envelope.payload.skip as usize;
        // A missing limit means "no limit" at this layer.
        let limit = envelope.payload.limit.map(|l| l as usize);

        let file_history = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.file_history_paginated(path, skip, limit)
            })
            .await??;

        // Convert each history entry into its wire representation; the path
        // is echoed back alongside the entries.
        Ok(proto::GitFileHistoryResponse {
            entries: file_history
                .entries
                .into_iter()
                .map(|entry| proto::FileHistoryEntry {
                    sha: entry.sha.to_string(),
                    subject: entry.subject.to_string(),
                    message: entry.message.to_string(),
                    commit_timestamp: entry.commit_timestamp,
                    author_name: entry.author_name.to_string(),
                    author_email: entry.author_email.to_string(),
                })
                .collect(),
            path: file_history.path.to_proto(),
        })
    }
2803
    /// Handles a remote request to reset HEAD to a commit. Only soft and
    /// mixed resets are representable in the wire enum.
    async fn handle_reset(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitReset>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Exhaustive: the proto enum has exactly these two variants.
        let mode = match envelope.payload.mode() {
            git_reset::ResetMode::Soft => ResetMode::Soft,
            git_reset::ResetMode::Mixed => ResetMode::Mixed,
        };

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.reset(envelope.payload.commit, mode, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2824
2825 async fn handle_checkout_files(
2826 this: Entity<Self>,
2827 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2828 mut cx: AsyncApp,
2829 ) -> Result<proto::Ack> {
2830 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2831 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2832 let paths = envelope
2833 .payload
2834 .paths
2835 .iter()
2836 .map(|s| RepoPath::from_proto(s))
2837 .collect::<Result<Vec<_>>>()?;
2838
2839 repository_handle
2840 .update(&mut cx, |repository_handle, cx| {
2841 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2842 })
2843 .await?;
2844 Ok(proto::Ack {})
2845 }
2846
    /// Handles a request to open the commit-message buffer for a repository
    /// and replicate it to the requesting peer, returning the buffer's id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Share the buffer with the peer that asked for it, preferring the
        // original sender when the request was forwarded.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2877
    /// Handles an askpass round-trip for an in-flight remote git operation:
    /// looks up the delegate registered under `askpass_id`, prompts through
    /// it, and returns the (decrypted) response.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map so no lock is held across the
        // `ask_password` await below.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        // NOTE(review): if this returns None we bail without re-inserting the
        // delegate, so a later prompt for the same operation would fail with
        // "no askpass found" — confirm that's intended.
        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so subsequent prompts for this operation work.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2906
    /// Handles a request to determine which branches the current commits have
    /// been pushed to, returning the branch names as strings.
    async fn handle_check_for_pushed_commits(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::CheckForPushedCommits>,
        mut cx: AsyncApp,
    ) -> Result<proto::CheckForPushedCommitsResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branches = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.check_for_pushed_commits()
            })
            .await??;
        Ok(proto::CheckForPushedCommitsResponse {
            pushed_to: branches
                .into_iter()
                .map(|commit| commit.to_string())
                .collect(),
        })
    }
2927
    /// Handles a request for a repository diff. The diff type selects what is
    /// compared (HEAD vs index, HEAD vs worktree, or against a merge base),
    /// and the resulting text is capped before being sent back.
    async fn handle_git_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let diff_type = match envelope.payload.diff_type() {
            proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
            proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
            proto::git_diff::DiffType::MergeBase => {
                // MergeBase is the only variant that needs the extra ref field.
                let base_ref = envelope
                    .payload
                    .merge_base_ref
                    .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
                DiffType::MergeBase {
                    base_ref: base_ref.into(),
                }
            }
        };

        let mut diff = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.diff(diff_type, cx)
            })
            .await??;
        const ONE_MB: usize = 1_000_000;
        // Cap oversized payloads. Note this truncates by *char* count, so
        // multi-byte text can still exceed ONE_MB bytes (by up to 4x).
        if diff.len() > ONE_MB {
            diff = diff.chars().take(ONE_MB).collect()
        }

        Ok(proto::GitDiffResponse { diff })
    }
2961
2962 async fn handle_tree_diff(
2963 this: Entity<Self>,
2964 request: TypedEnvelope<proto::GetTreeDiff>,
2965 mut cx: AsyncApp,
2966 ) -> Result<proto::GetTreeDiffResponse> {
2967 let repository_id = RepositoryId(request.payload.repository_id);
2968 let diff_type = if request.payload.is_merge {
2969 DiffTreeType::MergeBase {
2970 base: request.payload.base.into(),
2971 head: request.payload.head.into(),
2972 }
2973 } else {
2974 DiffTreeType::Since {
2975 base: request.payload.base.into(),
2976 head: request.payload.head.into(),
2977 }
2978 };
2979
2980 let diff = this
2981 .update(&mut cx, |this, cx| {
2982 let repository = this.repositories().get(&repository_id)?;
2983 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2984 })
2985 .context("missing repository")?
2986 .await??;
2987
2988 Ok(proto::GetTreeDiffResponse {
2989 entries: diff
2990 .entries
2991 .into_iter()
2992 .map(|(path, status)| proto::TreeDiffStatus {
2993 path: path.as_ref().to_proto(),
2994 status: match status {
2995 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2996 TreeDiffStatus::Modified { .. } => {
2997 proto::tree_diff_status::Status::Modified.into()
2998 }
2999 TreeDiffStatus::Deleted { .. } => {
3000 proto::tree_diff_status::Status::Deleted.into()
3001 }
3002 },
3003 oid: match status {
3004 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
3005 Some(old.to_string())
3006 }
3007 TreeDiffStatus::Added => None,
3008 },
3009 })
3010 .collect(),
3011 })
3012 }
3013
3014 async fn handle_get_blob_content(
3015 this: Entity<Self>,
3016 request: TypedEnvelope<proto::GetBlobContent>,
3017 mut cx: AsyncApp,
3018 ) -> Result<proto::GetBlobContentResponse> {
3019 let oid = git::Oid::from_str(&request.payload.oid)?;
3020 let repository_id = RepositoryId(request.payload.repository_id);
3021 let content = this
3022 .update(&mut cx, |this, cx| {
3023 let repository = this.repositories().get(&repository_id)?;
3024 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
3025 })
3026 .context("missing repository")?
3027 .await?;
3028 Ok(proto::GetBlobContentResponse { content })
3029 }
3030
    /// Handles a request to open the unstaged diff for a buffer, tracking the
    /// diff per requesting peer and returning the staged (index) base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Record the shared diff under the requesting peer (original sender
        // when forwarded), keyed by buffer.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
3054
3055 async fn handle_open_uncommitted_diff(
3056 this: Entity<Self>,
3057 request: TypedEnvelope<proto::OpenUncommittedDiff>,
3058 mut cx: AsyncApp,
3059 ) -> Result<proto::OpenUncommittedDiffResponse> {
3060 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3061 let diff = this
3062 .update(&mut cx, |this, cx| {
3063 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
3064 Some(this.open_uncommitted_diff(buffer, cx))
3065 })
3066 .context("missing buffer")?
3067 .await?;
3068 this.update(&mut cx, |this, _| {
3069 let shared_diffs = this
3070 .shared_diffs
3071 .entry(request.original_sender_id.unwrap_or(request.sender_id))
3072 .or_default();
3073 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
3074 });
3075 Ok(diff.read_with(&cx, |diff, cx| {
3076 use proto::open_uncommitted_diff_response::Mode;
3077
3078 let unstaged_diff = diff.secondary_diff();
3079 let index_snapshot = unstaged_diff.and_then(|diff| {
3080 let diff = diff.read(cx);
3081 diff.base_text_exists().then(|| diff.base_text(cx))
3082 });
3083
3084 let mode;
3085 let staged_text;
3086 let committed_text;
3087 if diff.base_text_exists() {
3088 let committed_snapshot = diff.base_text(cx);
3089 committed_text = Some(committed_snapshot.text());
3090 if let Some(index_text) = index_snapshot {
3091 if index_text.remote_id() == committed_snapshot.remote_id() {
3092 mode = Mode::IndexMatchesHead;
3093 staged_text = None;
3094 } else {
3095 mode = Mode::IndexAndHead;
3096 staged_text = Some(index_text.text());
3097 }
3098 } else {
3099 mode = Mode::IndexAndHead;
3100 staged_text = None;
3101 }
3102 } else {
3103 mode = Mode::IndexAndHead;
3104 committed_text = None;
3105 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
3106 }
3107
3108 proto::OpenUncommittedDiffResponse {
3109 committed_text,
3110 staged_text,
3111 mode: mode.into(),
3112 }
3113 }))
3114 }
3115
3116 async fn handle_update_diff_bases(
3117 this: Entity<Self>,
3118 request: TypedEnvelope<proto::UpdateDiffBases>,
3119 mut cx: AsyncApp,
3120 ) -> Result<()> {
3121 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3122 this.update(&mut cx, |this, cx| {
3123 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
3124 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
3125 {
3126 let buffer = buffer.read(cx).text_snapshot();
3127 diff_state.update(cx, |diff_state, cx| {
3128 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
3129 })
3130 }
3131 });
3132 Ok(())
3133 }
3134
3135 async fn handle_blame_buffer(
3136 this: Entity<Self>,
3137 envelope: TypedEnvelope<proto::BlameBuffer>,
3138 mut cx: AsyncApp,
3139 ) -> Result<proto::BlameBufferResponse> {
3140 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3141 let version = deserialize_version(&envelope.payload.version);
3142 let buffer = this.read_with(&cx, |this, cx| {
3143 this.buffer_store.read(cx).get_existing(buffer_id)
3144 })?;
3145 buffer
3146 .update(&mut cx, |buffer, _| {
3147 buffer.wait_for_version(version.clone())
3148 })
3149 .await?;
3150 let blame = this
3151 .update(&mut cx, |this, cx| {
3152 this.blame_buffer(&buffer, Some(version), cx)
3153 })
3154 .await?;
3155 Ok(serialize_blame_buffer_response(blame))
3156 }
3157
3158 async fn handle_get_permalink_to_line(
3159 this: Entity<Self>,
3160 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3161 mut cx: AsyncApp,
3162 ) -> Result<proto::GetPermalinkToLineResponse> {
3163 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3164 // let version = deserialize_version(&envelope.payload.version);
3165 let selection = {
3166 let proto_selection = envelope
3167 .payload
3168 .selection
3169 .context("no selection to get permalink for defined")?;
3170 proto_selection.start as u32..proto_selection.end as u32
3171 };
3172 let buffer = this.read_with(&cx, |this, cx| {
3173 this.buffer_store.read(cx).get_existing(buffer_id)
3174 })?;
3175 let permalink = this
3176 .update(&mut cx, |this, cx| {
3177 this.get_permalink_to_line(&buffer, selection, cx)
3178 })
3179 .await?;
3180 Ok(proto::GetPermalinkToLineResponse {
3181 permalink: permalink.to_string(),
3182 })
3183 }
3184
3185 fn repository_for_request(
3186 this: &Entity<Self>,
3187 id: RepositoryId,
3188 cx: &mut AsyncApp,
3189 ) -> Result<Entity<Repository>> {
3190 this.read_with(cx, |this, _| {
3191 this.repositories
3192 .get(&id)
3193 .context("missing repository handle")
3194 .cloned()
3195 })
3196 }
3197
3198 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3199 self.repositories
3200 .iter()
3201 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3202 .collect()
3203 }
3204
    /// Groups the given worktree entry updates by the repository that contains
    /// them, translating each absolute path into a repo-relative `RepoPath`.
    ///
    /// When repositories are nested, each path is attributed only to its
    /// innermost containing repository. All classification runs on the
    /// background executor.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Pair each repository with its working-directory path up front, so
        // the spawned task doesn't need entity access.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorted order is what makes the per-repo range scan below valid.
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            // One sub-task per repository; `FuturesOrdered` preserves the
            // (reversed, i.e. deepest-first) repository order in the results.
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the entry's index so duplicates can be
                        // filtered across repositories below.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3284}
3285
3286impl BufferGitState {
3287 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3288 Self {
3289 unstaged_diff: Default::default(),
3290 uncommitted_diff: Default::default(),
3291 oid_diffs: Default::default(),
3292 recalculate_diff_task: Default::default(),
3293 language: Default::default(),
3294 language_registry: Default::default(),
3295 recalculating_tx: postage::watch::channel_with(false).0,
3296 hunk_staging_operation_count: 0,
3297 hunk_staging_operation_count_as_of_write: 0,
3298 head_text: Default::default(),
3299 index_text: Default::default(),
3300 oid_texts: Default::default(),
3301 head_changed: Default::default(),
3302 index_changed: Default::default(),
3303 language_changed: Default::default(),
3304 conflict_updated_futures: Default::default(),
3305 conflict_set: Default::default(),
3306 reparse_conflict_markers_task: Default::default(),
3307 }
3308 }
3309
3310 #[ztracing::instrument(skip_all)]
3311 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3312 self.language = buffer.read(cx).language().cloned();
3313 self.language_changed = true;
3314 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3315 }
3316
    /// Re-scans the buffer for git conflict markers and updates the associated
    /// `ConflictSet`, if one is still alive.
    ///
    /// The returned receiver fires once the update has been applied; it is
    /// dropped unfired (yielding `Canceled`) when there is no live conflict
    /// set, or when the buffer had no conflicts to re-check.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        // Bail out if every observer of the conflict set has dropped it.
        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when the buffer previously contained conflicts;
        // otherwise there is nothing to compare a new parse against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            // Replacing the task cancels any previous in-flight reparse.
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parsing and diffing can be slow on large buffers, so do it
                // off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake every waiter that accumulated while this (or an
                    // earlier, superseded) reparse was in flight.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3368
3369 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3370 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3371 }
3372
3373 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3374 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3375 }
3376
3377 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3378 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3379 }
3380
3381 fn handle_base_texts_updated(
3382 &mut self,
3383 buffer: text::BufferSnapshot,
3384 message: proto::UpdateDiffBases,
3385 cx: &mut Context<Self>,
3386 ) {
3387 use proto::update_diff_bases::Mode;
3388
3389 let Some(mode) = Mode::from_i32(message.mode) else {
3390 return;
3391 };
3392
3393 let diff_bases_change = match mode {
3394 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3395 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3396 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3397 Mode::IndexAndHead => DiffBasesChange::SetEach {
3398 index: message.staged_text,
3399 head: message.committed_text,
3400 },
3401 };
3402
3403 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3404 }
3405
3406 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3407 if *self.recalculating_tx.borrow() {
3408 let mut rx = self.recalculating_tx.subscribe();
3409 Some(async move {
3410 loop {
3411 let is_recalculating = rx.recv().await;
3412 if is_recalculating != Some(true) {
3413 break;
3414 }
3415 }
3416 })
3417 } else {
3418 None
3419 }
3420 }
3421
3422 fn diff_bases_changed(
3423 &mut self,
3424 buffer: text::BufferSnapshot,
3425 diff_bases_change: Option<DiffBasesChange>,
3426 cx: &mut Context<Self>,
3427 ) {
3428 match diff_bases_change {
3429 Some(DiffBasesChange::SetIndex(index)) => {
3430 self.index_text = index.map(|mut index| {
3431 text::LineEnding::normalize(&mut index);
3432 Arc::from(index.as_str())
3433 });
3434 self.index_changed = true;
3435 }
3436 Some(DiffBasesChange::SetHead(head)) => {
3437 self.head_text = head.map(|mut head| {
3438 text::LineEnding::normalize(&mut head);
3439 Arc::from(head.as_str())
3440 });
3441 self.head_changed = true;
3442 }
3443 Some(DiffBasesChange::SetBoth(text)) => {
3444 let text = text.map(|mut text| {
3445 text::LineEnding::normalize(&mut text);
3446 Arc::from(text.as_str())
3447 });
3448 self.head_text = text.clone();
3449 self.index_text = text;
3450 self.head_changed = true;
3451 self.index_changed = true;
3452 }
3453 Some(DiffBasesChange::SetEach { index, head }) => {
3454 self.index_text = index.map(|mut index| {
3455 text::LineEnding::normalize(&mut index);
3456 Arc::from(index.as_str())
3457 });
3458 self.index_changed = true;
3459 self.head_text = head.map(|mut head| {
3460 text::LineEnding::normalize(&mut head);
3461 Arc::from(head.as_str())
3462 });
3463 self.head_changed = true;
3464 }
3465 None => {}
3466 }
3467
3468 self.recalculate_diffs(buffer, cx)
3469 }
3470
    /// Recomputes the unstaged, uncommitted, and per-oid diffs for this buffer
    /// against the cached base texts, storing the in-flight work in
    /// `recalculate_diff_task` and signaling progress via `recalculating_tx`.
    ///
    /// The recalculation aborts itself if hunk stage/unstage operations land
    /// while it is running, to avoid clobbering pending UI state.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        // Capture everything the async task needs up front.
        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality suffices: `SetBoth` stores one shared Arc for both
        // base texts, so this cheaply detects "index matches HEAD".
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        // Snapshot the still-alive oid diffs together with their base texts.
        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop dead oid-diff entries and their cached base texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        // Replacing the task cancels any previous in-flight recalculation.
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            // NOTE(review): the `then_some(false)`/`then_some(true)`
                            // flags appear to tag which base (index vs. HEAD)
                            // changed — confirm against `BufferDiff::update_diff`.
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When index == HEAD, the uncommitted diff is identical to the
                // unstaged one and can simply be shared.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            // Publish the new unstaged snapshot first, remembering which range
            // changed so the uncommitted diff can propagate it.
            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Refresh each still-alive diff-against-oid, one at a time.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear dirty flags and signal completion.
            // NOTE(review): the `Result` of this `update` is discarded —
            // presumably failure only occurs during shutdown; confirm.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
3664}
3665
3666fn make_remote_delegate(
3667 this: Entity<GitStore>,
3668 project_id: u64,
3669 repository_id: RepositoryId,
3670 askpass_id: u64,
3671 cx: &mut AsyncApp,
3672) -> AskPassDelegate {
3673 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3674 this.update(cx, |this, cx| {
3675 let Some((client, _)) = this.downstream_client() else {
3676 return;
3677 };
3678 let response = client.request(proto::AskPassRequest {
3679 project_id,
3680 repository_id: repository_id.to_proto(),
3681 askpass_id,
3682 prompt,
3683 });
3684 cx.spawn(async move |_, _| {
3685 let mut response = response.await?.response;
3686 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3687 .ok();
3688 response.zeroize();
3689 anyhow::Ok(())
3690 })
3691 .detach_and_log_err(cx);
3692 });
3693 })
3694}
3695
3696impl RepositoryId {
3697 pub fn to_proto(self) -> u64 {
3698 self.0
3699 }
3700
3701 pub fn from_proto(id: u64) -> Self {
3702 RepositoryId(id)
3703 }
3704}
3705
3706impl RepositorySnapshot {
3707 fn empty(
3708 id: RepositoryId,
3709 work_directory_abs_path: Arc<Path>,
3710 original_repo_abs_path: Option<Arc<Path>>,
3711 path_style: PathStyle,
3712 ) -> Self {
3713 Self {
3714 id,
3715 statuses_by_path: Default::default(),
3716 original_repo_abs_path: original_repo_abs_path
3717 .unwrap_or_else(|| work_directory_abs_path.clone()),
3718 work_directory_abs_path,
3719 branch: None,
3720 branch_list: Arc::from([]),
3721 head_commit: None,
3722 scan_id: 0,
3723 merge: Default::default(),
3724 remote_origin_url: None,
3725 remote_upstream_url: None,
3726 stash_entries: Default::default(),
3727 linked_worktrees: Arc::from([]),
3728 path_style,
3729 }
3730 }
3731
3732 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3733 proto::UpdateRepository {
3734 branch_summary: self.branch.as_ref().map(branch_to_proto),
3735 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3736 updated_statuses: self
3737 .statuses_by_path
3738 .iter()
3739 .map(|entry| entry.to_proto())
3740 .collect(),
3741 removed_statuses: Default::default(),
3742 current_merge_conflicts: self
3743 .merge
3744 .merge_heads_by_conflicted_path
3745 .iter()
3746 .map(|(repo_path, _)| repo_path.to_proto())
3747 .collect(),
3748 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3749 project_id,
3750 id: self.id.to_proto(),
3751 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3752 entry_ids: vec![self.id.to_proto()],
3753 scan_id: self.scan_id,
3754 is_last_update: true,
3755 stash_entries: self
3756 .stash_entries
3757 .entries
3758 .iter()
3759 .map(stash_to_proto)
3760 .collect(),
3761 remote_upstream_url: self.remote_upstream_url.clone(),
3762 remote_origin_url: self.remote_origin_url.clone(),
3763 original_repo_abs_path: Some(
3764 self.original_repo_abs_path.to_string_lossy().into_owned(),
3765 ),
3766 linked_worktrees: self
3767 .linked_worktrees
3768 .iter()
3769 .map(worktree_to_proto)
3770 .collect(),
3771 }
3772 }
3773
3774 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3775 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3776 let mut removed_statuses: Vec<String> = Vec::new();
3777
3778 let mut new_statuses = self.statuses_by_path.iter().peekable();
3779 let mut old_statuses = old.statuses_by_path.iter().peekable();
3780
3781 let mut current_new_entry = new_statuses.next();
3782 let mut current_old_entry = old_statuses.next();
3783 loop {
3784 match (current_new_entry, current_old_entry) {
3785 (Some(new_entry), Some(old_entry)) => {
3786 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3787 Ordering::Less => {
3788 updated_statuses.push(new_entry.to_proto());
3789 current_new_entry = new_statuses.next();
3790 }
3791 Ordering::Equal => {
3792 if new_entry.status != old_entry.status
3793 || new_entry.diff_stat != old_entry.diff_stat
3794 {
3795 updated_statuses.push(new_entry.to_proto());
3796 }
3797 current_old_entry = old_statuses.next();
3798 current_new_entry = new_statuses.next();
3799 }
3800 Ordering::Greater => {
3801 removed_statuses.push(old_entry.repo_path.to_proto());
3802 current_old_entry = old_statuses.next();
3803 }
3804 }
3805 }
3806 (None, Some(old_entry)) => {
3807 removed_statuses.push(old_entry.repo_path.to_proto());
3808 current_old_entry = old_statuses.next();
3809 }
3810 (Some(new_entry), None) => {
3811 updated_statuses.push(new_entry.to_proto());
3812 current_new_entry = new_statuses.next();
3813 }
3814 (None, None) => break,
3815 }
3816 }
3817
3818 proto::UpdateRepository {
3819 branch_summary: self.branch.as_ref().map(branch_to_proto),
3820 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3821 updated_statuses,
3822 removed_statuses,
3823 current_merge_conflicts: self
3824 .merge
3825 .merge_heads_by_conflicted_path
3826 .iter()
3827 .map(|(path, _)| path.to_proto())
3828 .collect(),
3829 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3830 project_id,
3831 id: self.id.to_proto(),
3832 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3833 entry_ids: vec![],
3834 scan_id: self.scan_id,
3835 is_last_update: true,
3836 stash_entries: self
3837 .stash_entries
3838 .entries
3839 .iter()
3840 .map(stash_to_proto)
3841 .collect(),
3842 remote_upstream_url: self.remote_upstream_url.clone(),
3843 remote_origin_url: self.remote_origin_url.clone(),
3844 original_repo_abs_path: Some(
3845 self.original_repo_abs_path.to_string_lossy().into_owned(),
3846 ),
3847 linked_worktrees: self
3848 .linked_worktrees
3849 .iter()
3850 .map(worktree_to_proto)
3851 .collect(),
3852 }
3853 }
3854
3855 /// The main worktree is the original checkout that other worktrees were
3856 /// created from.
3857 ///
3858 /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
3859 /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
3860 ///
3861 /// Submodules also return `true` here, since they are not linked worktrees.
3862 pub fn is_main_worktree(&self) -> bool {
3863 self.work_directory_abs_path == self.original_repo_abs_path
3864 }
3865
3866 /// Returns true if this repository is a linked worktree, that is, one that
3867 /// was created from another worktree.
3868 ///
3869 /// Returns `false` for both the main worktree and submodules.
3870 pub fn is_linked_worktree(&self) -> bool {
3871 !self.is_main_worktree()
3872 }
3873
3874 pub fn linked_worktrees(&self) -> &[GitWorktree] {
3875 &self.linked_worktrees
3876 }
3877
3878 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3879 self.statuses_by_path.iter().cloned()
3880 }
3881
3882 pub fn status_summary(&self) -> GitSummary {
3883 self.statuses_by_path.summary().item_summary
3884 }
3885
3886 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3887 self.statuses_by_path
3888 .get(&PathKey(path.as_ref().clone()), ())
3889 .cloned()
3890 }
3891
3892 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
3893 self.statuses_by_path
3894 .get(&PathKey(path.as_ref().clone()), ())
3895 .and_then(|entry| entry.diff_stat)
3896 }
3897
3898 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3899 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3900 }
3901
3902 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3903 self.path_style
3904 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3905 .unwrap()
3906 .into()
3907 }
3908
3909 #[inline]
3910 fn abs_path_to_repo_path_inner(
3911 work_directory_abs_path: &Path,
3912 abs_path: &Path,
3913 path_style: PathStyle,
3914 ) -> Option<RepoPath> {
3915 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3916 Some(RepoPath::from_rel_path(&rel_path))
3917 }
3918
3919 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3920 self.merge
3921 .merge_heads_by_conflicted_path
3922 .contains_key(repo_path)
3923 }
3924
3925 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3926 let had_conflict_on_last_merge_head_change = self
3927 .merge
3928 .merge_heads_by_conflicted_path
3929 .contains_key(repo_path);
3930 let has_conflict_currently = self
3931 .status_for_path(repo_path)
3932 .is_some_and(|entry| entry.status.is_conflicted());
3933 had_conflict_on_last_merge_head_change || has_conflict_currently
3934 }
3935
3936 /// This is the name that will be displayed in the repository selector for this repository.
3937 pub fn display_name(&self) -> SharedString {
3938 self.work_directory_abs_path
3939 .file_name()
3940 .unwrap_or_default()
3941 .to_string_lossy()
3942 .to_string()
3943 .into()
3944 }
3945}
3946
3947pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3948 proto::StashEntry {
3949 oid: entry.oid.as_bytes().to_vec(),
3950 message: entry.message.clone(),
3951 branch: entry.branch.clone(),
3952 index: entry.index as u64,
3953 timestamp: entry.timestamp,
3954 }
3955}
3956
3957pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3958 Ok(StashEntry {
3959 oid: Oid::from_bytes(&entry.oid)?,
3960 message: entry.message.clone(),
3961 index: entry.index as usize,
3962 branch: entry.branch.clone(),
3963 timestamp: entry.timestamp,
3964 })
3965}
3966
3967impl MergeDetails {
3968 async fn update(
3969 &mut self,
3970 backend: &Arc<dyn GitRepository>,
3971 current_conflicted_paths: Vec<RepoPath>,
3972 ) -> Result<bool> {
3973 log::debug!("load merge details");
3974 self.message = backend.merge_message().await.map(SharedString::from);
3975 let heads = backend
3976 .revparse_batch(vec![
3977 "MERGE_HEAD".into(),
3978 "CHERRY_PICK_HEAD".into(),
3979 "REBASE_HEAD".into(),
3980 "REVERT_HEAD".into(),
3981 "APPLY_HEAD".into(),
3982 ])
3983 .await
3984 .log_err()
3985 .unwrap_or_default()
3986 .into_iter()
3987 .map(|opt| opt.map(SharedString::from))
3988 .collect::<Vec<_>>();
3989
3990 let mut conflicts_changed = false;
3991
3992 // Record the merge state for newly conflicted paths
3993 for path in ¤t_conflicted_paths {
3994 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3995 conflicts_changed = true;
3996 self.merge_heads_by_conflicted_path
3997 .insert(path.clone(), heads.clone());
3998 }
3999 }
4000
4001 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
4002 self.merge_heads_by_conflicted_path
4003 .retain(|path, old_merge_heads| {
4004 let keep = current_conflicted_paths.contains(path)
4005 || (old_merge_heads == &heads
4006 && old_merge_heads.iter().any(|head| head.is_some()));
4007 if !keep {
4008 conflicts_changed = true;
4009 }
4010 keep
4011 });
4012
4013 Ok(conflicts_changed)
4014 }
4015}
4016
4017impl Repository {
4018 pub fn is_trusted(&self) -> bool {
4019 match self.repository_state.peek() {
4020 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
4021 _ => false,
4022 }
4023 }
4024
    /// Returns a clone of this repository's current immutable snapshot
    /// (statuses, branch, merge state, stash entries, ...).
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
4028
    /// Iterates over all recorded pending git operations, cloned out of the
    /// underlying collection.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
4032
    /// Returns the aggregate summary over all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
4036
    /// Looks up the pending git operations recorded for a single
    /// repo-relative path, if any.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
4042
    /// Constructs a repository backed by a local git checkout.
    ///
    /// The backend is opened asynchronously: `repository_state` resolves once
    /// `LocalRepositoryState::new` completes, and a worker task is spawned up
    /// front to process git jobs against that shared state.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // Errors are stringified so the `Shared` future's output is `Clone`.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data when the repository changes.
        // NOTE(review): `scan_id > 1` appears to skip invalidation during the
        // initial scan — confirm against the scanner's scan-id semantics.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                // Stash changes only affect graph data that includes stashes.
                if this.scan_id > 1 {
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4116
    /// Constructs a repository whose git operations are proxied to a remote
    /// peer over RPC.
    ///
    /// Unlike [`Self::local`], the state is available immediately since there
    /// is no backend to open.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Already-resolved state, wrapped to match the local variant's type.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4154
    /// Returns the owning [`GitStore`], or `None` if it has been dropped.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4158
    /// Recomputes the index and HEAD diff bases for every open buffer that
    /// belongs to this repository, and applies the resulting changes to each
    /// buffer's diff state (also mirroring them to a downstream collab
    /// client, when connected).
    ///
    /// Keyed with `GitJobKey::ReloadBufferDiffBases` so that redundant
    /// reloads queued behind an unstarted one can be coalesced by the worker.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                // Diff bases can only be recomputed against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (foreground): collect, per open buffer in this
                // repository, its repo path plus the cached index/head texts
                // (only for diffs that are still upgradable).
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Phase 2 (background): reload index/head texts from git and
                // compute the minimal `DiffBasesChange` for each buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            // Only load what the cached state says is in use.
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // When both bases are identical,
                                            // collapse to a single SetBoth.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (foreground): apply each change to the buffer's diff
                // state and forward it downstream when collaborating.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4318
    /// Enqueues an un-keyed job on this repository's git worker; see
    /// [`Self::send_keyed_job`] for the keyed variant. `status`, when
    /// present, is surfaced as the job's progress message while it runs.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4331
    /// Enqueues `job` on this repository's git worker and returns a receiver
    /// for its result.
    ///
    /// `key`, when present, identifies the job so the worker can coalesce
    /// queued-but-unstarted duplicates. `status`, when present, is recorded
    /// in `active_jobs` (with a start timestamp) for the duration of the job
    /// so the UI can show in-flight git activity. The receiver is canceled if
    /// the worker or the result is dropped.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job as active before polling it, so
                        // status UI reflects it for the whole run.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Always clear the active-job entry, regardless of
                        // the job's outcome.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The caller may have dropped the receiver; ignore.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4381
4382 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4383 let Some(git_store) = self.git_store.upgrade() else {
4384 return;
4385 };
4386 let entity = cx.entity();
4387 git_store.update(cx, |git_store, cx| {
4388 let Some((&id, _)) = git_store
4389 .repositories
4390 .iter()
4391 .find(|(_, handle)| *handle == &entity)
4392 else {
4393 return;
4394 };
4395 git_store.active_repo_id = Some(id);
4396 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4397 });
4398 }
4399
    /// Iterates over the cached status entries from the latest snapshot.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4403
    /// Returns the cached diff statistics for `path`, if any are recorded in
    /// the latest snapshot.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4407
    /// Returns a clone of the cached stash entries from the latest snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4411
4412 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4413 let git_store = self.git_store.upgrade()?;
4414 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4415 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4416 let abs_path = SanitizedPath::new(&abs_path);
4417 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4418 Some(ProjectPath {
4419 worktree_id: worktree.read(cx).id(),
4420 path: relative_path,
4421 })
4422 }
4423
4424 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4425 let git_store = self.git_store.upgrade()?;
4426 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4427 let abs_path = worktree_store.absolutize(path, cx)?;
4428 self.snapshot.abs_path_to_repo_path(&abs_path)
4429 }
4430
4431 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4432 other
4433 .read(cx)
4434 .snapshot
4435 .work_directory_abs_path
4436 .starts_with(&self.snapshot.work_directory_abs_path)
4437 }
4438
    /// Returns the shared commit-message buffer, creating it on first use.
    ///
    /// Locally this delegates to [`Self::open_local_commit_buffer`]; on a
    /// remote repository it asks the host to open the buffer and waits for it
    /// to replicate. The created buffer is cached in `commit_message_buffer`
    /// so subsequent calls return it immediately.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: the buffer was already opened.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait until the host's buffer has replicated locally.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache for subsequent calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4491
4492 fn open_local_commit_buffer(
4493 language_registry: Option<Arc<LanguageRegistry>>,
4494 buffer_store: Entity<BufferStore>,
4495 cx: &mut Context<Self>,
4496 ) -> Task<Result<Entity<Buffer>>> {
4497 cx.spawn(async move |repository, cx| {
4498 let git_commit_language = match language_registry {
4499 Some(language_registry) => {
4500 Some(language_registry.language_for_name("Git Commit").await?)
4501 }
4502 None => None,
4503 };
4504 let buffer = buffer_store
4505 .update(cx, |buffer_store, cx| {
4506 buffer_store.create_buffer(git_commit_language, false, cx)
4507 })
4508 .await?;
4509
4510 repository.update(cx, |repository, _| {
4511 repository.commit_message_buffer = Some(buffer.clone());
4512 })?;
4513 Ok(buffer)
4514 })
4515 }
4516
    /// Restores `paths` to their contents at `commit`
    /// (`git checkout <commit> -- <paths>`), tracking the affected paths as
    /// pending "Reverted" operations while the job runs.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    // Forward the checkout to the collab host.
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4571
    /// Resets HEAD to `commit` with the given mode
    /// (`git reset --soft|--mixed <commit>`).
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            // Only soft/mixed resets are representable in the
                            // proto message.
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4605
4606 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4607 let id = self.id;
4608 self.send_job(None, move |git_repo, _cx| async move {
4609 match git_repo {
4610 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4611 backend.show(commit).await
4612 }
4613 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4614 let resp = client
4615 .request(proto::GitShow {
4616 project_id: project_id.0,
4617 repository_id: id.to_proto(),
4618 commit,
4619 })
4620 .await?;
4621
4622 Ok(CommitDetails {
4623 sha: resp.sha.into(),
4624 message: resp.message.into(),
4625 commit_timestamp: resp.commit_timestamp,
4626 author_email: resp.author_email.into(),
4627 author_name: resp.author_name.into(),
4628 })
4629 }
4630 }
4631 })
4632 }
4633
    /// Loads the full diff of `commit` (old/new text per changed file).
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    // Deserialize each file's entry; a malformed path fails
                    // the whole response.
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4669
    /// Loads the full (un-paginated) commit history for `path`.
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4676
    /// Loads a page of commit history for `path`, skipping `skip` entries and
    /// returning at most `limit` entries (all remaining when `None`).
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        // The response echoes back the (possibly renamed)
                        // path it resolved.
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4718
    /// Returns the already-fetched graph data for the given (source, order)
    /// combination, or `None` if no fetch has been started for it.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4726
4727 pub fn search_commits(
4728 &mut self,
4729 log_source: LogSource,
4730 search_args: SearchCommitArgs,
4731 request_tx: smol::channel::Sender<Oid>,
4732 cx: &mut Context<Self>,
4733 ) {
4734 let repository_state = self.repository_state.clone();
4735
4736 cx.background_spawn(async move {
4737 let repo_state = repository_state.await;
4738
4739 match repo_state {
4740 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4741 backend
4742 .search_commits(log_source, search_args, request_tx)
4743 .await
4744 .log_err();
4745 }
4746 Ok(RepositoryState::Remote(_)) => {}
4747 Err(_) => {}
4748 };
4749 })
4750 .detach();
4751 }
4752
    /// Returns the slice of graph commits covering `range` for the given
    /// (source, order) combination, lazily starting a background fetch on
    /// first access. The response also reports whether the fetch is still
    /// running and any error it produced.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // The fetch task streams commits into this same map entry via
                // `local_git_graph_data`; dropping the entry drops the task.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record a fetch failure on the entry so the UI can
                    // surface it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the amount of data loaded so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4822
    /// Streams initial graph data from the local backend into this
    /// repository's `initial_graph_data` cache, emitting a `GraphEvent` with
    /// the updated commit count as each batch arrives.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // The backend walks the log on a background thread and sends batches
        // of commit data through `request_tx`.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Apply batches to the cache as they arrive; the channel closes when
        // the backend task finishes (or fails).
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            // Keep the oid -> index lookup in sync with the
                            // commit list.
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                // The entry owns this task, so it must exist while we run.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4876
    /// Returns the cached data state for `sha`, queuing a background load on
    /// a cache miss (and lazily opening the commit-data handler when needed).
    ///
    /// While the handler is starting, or when a miss could not yet be queued,
    /// `Loading` is returned and a later call will retry the request.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only record `Loading` once the request was actually
                    // accepted by the handler's channel.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                // A handler is already being started elsewhere; fall through
                // and report `Loading` for now.
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4897
    /// Spawns the commit-data handler: a foreground task that stores loaded
    /// commit data on the entity, plus a background loop that reads requested
    /// OIDs from the local backend. The background loop shuts itself down
    /// after 10 seconds without a request, which in turn closes the
    /// foreground side and marks the handler `Closed`.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        // Foreground side: move results into `commit_data` and notify
        // observers; mark the handler closed once the result channel ends.
        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        // Background side: service OID requests against the local backend's
        // commit-data reader.
        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: a fresh 10s timer races each request.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // A failed read is logged and skipped; the
                                // entry stays `Loading`.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task finish and
            // mark the handler as closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4988
4989 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4990 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4991 }
4992
4993 fn save_buffers<'a>(
4994 &self,
4995 entries: impl IntoIterator<Item = &'a RepoPath>,
4996 cx: &mut Context<Self>,
4997 ) -> Vec<Task<anyhow::Result<()>>> {
4998 let mut save_futures = Vec::new();
4999 if let Some(buffer_store) = self.buffer_store(cx) {
5000 buffer_store.update(cx, |buffer_store, cx| {
5001 for path in entries {
5002 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
5003 continue;
5004 };
5005 if let Some(buffer) = buffer_store.get_by_path(&project_path)
5006 && buffer
5007 .read(cx)
5008 .file()
5009 .is_some_and(|file| file.disk_state().exists())
5010 && buffer.read(cx).has_unsaved_edits()
5011 {
5012 save_futures.push(buffer_store.save_buffer(buffer, cx));
5013 }
5014 }
5015 })
5016 }
5017 save_futures
5018 }
5019
    /// Stages the given entries; see [`Self::stage_or_unstage_entries`].
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
5027
    /// Unstages the given entries; see [`Self::stage_or_unstage_entries`].
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
5035
    /// Stages (`git add`) or unstages (`git reset`) the given entries.
    ///
    /// Dirty buffers for the entries are saved first so the index write
    /// reflects current buffer contents. Open uncommitted diffs have all of
    /// their hunks optimistically marked staged/unstaged while the write is
    /// in flight; on success each diff's operation count is recorded as
    /// written, and on failure the pending hunk markers are rolled back.
    /// Keyed with `GitJobKey::WriteIndex` so duplicate writes for the same
    /// path set can be coalesced.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush buffer contents to disk before touching the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically toggle every hunk in each open
                            // uncommitted diff, remembering the operation
                            // count at the time of this write so later diff
                            // updates can be reconciled against it.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write, locally or via
                            // the collab host.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Commit or roll back the optimistic hunk state
                            // depending on whether the write succeeded.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5220
5221 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5222 let snapshot = self.snapshot.clone();
5223 let pending_ops = self.pending_ops.clone();
5224 let to_stage = cx.background_spawn(async move {
5225 snapshot
5226 .status()
5227 .filter_map(|entry| {
5228 if let Some(ops) =
5229 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5230 {
5231 if ops.staging() || ops.staged() {
5232 None
5233 } else {
5234 Some(entry.repo_path)
5235 }
5236 } else if entry.status.staging().is_fully_staged() {
5237 None
5238 } else {
5239 Some(entry.repo_path)
5240 }
5241 })
5242 .collect()
5243 });
5244
5245 cx.spawn(async move |this, cx| {
5246 let to_stage = to_stage.await;
5247 this.update(cx, |this, cx| {
5248 this.stage_or_unstage_entries(true, to_stage, cx)
5249 })?
5250 .await
5251 })
5252 }
5253
5254 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5255 let snapshot = self.snapshot.clone();
5256 let pending_ops = self.pending_ops.clone();
5257 let to_unstage = cx.background_spawn(async move {
5258 snapshot
5259 .status()
5260 .filter_map(|entry| {
5261 if let Some(ops) =
5262 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5263 {
5264 if !ops.staging() && !ops.staged() {
5265 None
5266 } else {
5267 Some(entry.repo_path)
5268 }
5269 } else if entry.status.staging().is_fully_unstaged() {
5270 None
5271 } else {
5272 Some(entry.repo_path)
5273 }
5274 })
5275 .collect()
5276 });
5277
5278 cx.spawn(async move |this, cx| {
5279 let to_unstage = to_unstage.await;
5280 this.update(cx, |this, cx| {
5281 this.stage_or_unstage_entries(false, to_unstage, cx)
5282 })?
5283 .await
5284 })
5285 }
5286
5287 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5288 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5289
5290 self.stash_entries(to_stash, cx)
5291 }
5292
    /// Stashes the given paths, either via the local git backend or by
    /// forwarding a `Stash` request to the remote host.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` is the job-channel error, second is the job's result.
            .await??;
            Ok(())
        })
    }
5329
    /// Pops the stash entry at `index`, or the most recent entry when `index`
    /// is `None`, locally or via a `StashPop` request to the remote host.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` is the job-channel error, second is the job's result.
            .await??;
            Ok(())
        })
    }
5363
    /// Applies the stash entry at `index` (or the most recent entry when
    /// `index` is `None`) without removing it from the stash, locally or via
    /// a `StashApply` request to the remote host.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` is the job-channel error, second is the job's result.
            .await??;
            Ok(())
        })
    }
5397
5398 pub fn stash_drop(
5399 &mut self,
5400 index: Option<usize>,
5401 cx: &mut Context<Self>,
5402 ) -> oneshot::Receiver<anyhow::Result<()>> {
5403 let id = self.id;
5404 let updates_tx = self
5405 .git_store()
5406 .and_then(|git_store| match &git_store.read(cx).state {
5407 GitStoreState::Local { downstream, .. } => downstream
5408 .as_ref()
5409 .map(|downstream| downstream.updates_tx.clone()),
5410 _ => None,
5411 });
5412 let this = cx.weak_entity();
5413 self.send_job(None, move |git_repo, mut cx| async move {
5414 match git_repo {
5415 RepositoryState::Local(LocalRepositoryState {
5416 backend,
5417 environment,
5418 ..
5419 }) => {
5420 // TODO would be nice to not have to do this manually
5421 let result = backend.stash_drop(index, environment).await;
5422 if result.is_ok()
5423 && let Ok(stash_entries) = backend.stash_entries().await
5424 {
5425 let snapshot = this.update(&mut cx, |this, cx| {
5426 this.snapshot.stash_entries = stash_entries;
5427 cx.emit(RepositoryEvent::StashEntriesChanged);
5428 this.snapshot.clone()
5429 })?;
5430 if let Some(updates_tx) = updates_tx {
5431 updates_tx
5432 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5433 .ok();
5434 }
5435 }
5436
5437 result
5438 }
5439 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5440 client
5441 .request(proto::StashDrop {
5442 project_id: project_id.0,
5443 repository_id: id.to_proto(),
5444 stash_index: index.map(|i| i as u64),
5445 })
5446 .await
5447 .context("sending stash pop request")?;
5448 Ok(())
5449 }
5450 }
5451 })
5452 }
5453
    /// Runs the given git hook, locally via the backend or by forwarding a
    /// `RunGitHook` request to the remote host. The job is labeled
    /// "git hook <name>" for status display.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5480
    /// Creates a commit with the given message and options, after running the
    /// pre-commit hook.
    ///
    /// The pre-commit hook job is enqueued first and its result is awaited
    /// inside the commit job, so a failing hook aborts the commit. On the
    /// remote path, the askpass delegate is registered under a fresh id for
    /// the duration of the request so the host can relay credential prompts.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the pre-commit hook job failed (or its channel closed).
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always deregister the delegate when the request finishes.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5536
    /// Fetches from the remote described by `fetch_options`, returning the
    /// command's stdout/stderr. On the remote path the askpass delegate is
    /// registered for the duration of the request to relay credential prompts.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always deregister the delegate when the request finishes.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5578
5579 pub fn push(
5580 &mut self,
5581 branch: SharedString,
5582 remote_branch: SharedString,
5583 remote: SharedString,
5584 options: Option<PushOptions>,
5585 askpass: AskPassDelegate,
5586 cx: &mut Context<Self>,
5587 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5588 let askpass_delegates = self.askpass_delegates.clone();
5589 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5590 let id = self.id;
5591
5592 let args = options
5593 .map(|option| match option {
5594 PushOptions::SetUpstream => " --set-upstream",
5595 PushOptions::Force => " --force-with-lease",
5596 })
5597 .unwrap_or("");
5598
5599 let updates_tx = self
5600 .git_store()
5601 .and_then(|git_store| match &git_store.read(cx).state {
5602 GitStoreState::Local { downstream, .. } => downstream
5603 .as_ref()
5604 .map(|downstream| downstream.updates_tx.clone()),
5605 _ => None,
5606 });
5607
5608 let this = cx.weak_entity();
5609 self.send_job(
5610 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5611 move |git_repo, mut cx| async move {
5612 match git_repo {
5613 RepositoryState::Local(LocalRepositoryState {
5614 backend,
5615 environment,
5616 ..
5617 }) => {
5618 let result = backend
5619 .push(
5620 branch.to_string(),
5621 remote_branch.to_string(),
5622 remote.to_string(),
5623 options,
5624 askpass,
5625 environment.clone(),
5626 cx.clone(),
5627 )
5628 .await;
5629 // TODO would be nice to not have to do this manually
5630 if result.is_ok() {
5631 let branches = backend.branches().await?;
5632 let branch = branches.into_iter().find(|branch| branch.is_head);
5633 log::info!("head branch after scan is {branch:?}");
5634 let snapshot = this.update(&mut cx, |this, cx| {
5635 this.snapshot.branch = branch;
5636 cx.emit(RepositoryEvent::HeadChanged);
5637 this.snapshot.clone()
5638 })?;
5639 if let Some(updates_tx) = updates_tx {
5640 updates_tx
5641 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5642 .ok();
5643 }
5644 }
5645 result
5646 }
5647 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5648 askpass_delegates.lock().insert(askpass_id, askpass);
5649 let _defer = util::defer(|| {
5650 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5651 debug_assert!(askpass_delegate.is_some());
5652 });
5653 let response = client
5654 .request(proto::Push {
5655 project_id: project_id.0,
5656 repository_id: id.to_proto(),
5657 askpass_id,
5658 branch_name: branch.to_string(),
5659 remote_branch_name: remote_branch.to_string(),
5660 remote_name: remote.to_string(),
5661 options: options.map(|options| match options {
5662 PushOptions::Force => proto::push::PushOptions::Force,
5663 PushOptions::SetUpstream => {
5664 proto::push::PushOptions::SetUpstream
5665 }
5666 }
5667 as i32),
5668 })
5669 .await?;
5670
5671 Ok(RemoteCommandOutput {
5672 stdout: response.stdout,
5673 stderr: response.stderr,
5674 })
5675 }
5676 }
5677 },
5678 )
5679 }
5680
    /// Pulls `branch` (or the current branch when `None`) from `remote`,
    /// optionally rebasing, and returns the command's stdout/stderr. On the
    /// remote path the askpass delegate is registered for the duration of
    /// the request to relay credential prompts.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build the human-readable status label shown while the job runs.
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always deregister the delegate when the request finishes.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5745
    /// Writes `content` into the git index for `path` (`None` removes the
    /// entry's staged text), serialized against other index writes for the
    /// same path via a keyed job.
    ///
    /// When `hunk_staging_operation_count` is provided, it is recorded on the
    /// buffer's diff state after the write so later diff recalculations can
    /// tell which hunk-staging operations this index write reflects.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            // Key on the path so concurrent writes to the same index entry
            // run in order rather than racing.
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit from the working copy;
                        // fall back to non-executable if the file is missing
                        // or unreadable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5824
    /// Adds a new git remote named `remote_name` pointing at `remote_url`,
    /// locally or via a `GitCreateRemote` request to the remote host.
    pub fn create_remote(
        &mut self,
        remote_name: String,
        remote_url: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git remote add {remote_name} {remote_url}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.create_remote(remote_name, remote_url).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitCreateRemote {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                remote_name,
                                remote_url,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5854
5855 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5856 let id = self.id;
5857 self.send_job(
5858 Some(format!("git remove remote {remote_name}").into()),
5859 move |repo, _cx| async move {
5860 match repo {
5861 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5862 backend.remove_remote(remote_name).await
5863 }
5864 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5865 client
5866 .request(proto::GitRemoveRemote {
5867 project_id: project_id.0,
5868 repository_id: id.to_proto(),
5869 remote_name,
5870 })
5871 .await?;
5872
5873 Ok(())
5874 }
5875 }
5876 },
5877 )
5878 }
5879
    /// Returns the remotes relevant to `branch_name`: the branch's push
    /// remote (when `is_push`) or upstream remote, falling back to all
    /// configured remotes when no branch-specific remote is found or no
    /// branch name is given.
    pub fn get_remotes(
        &mut self,
        branch_name: Option<String>,
        is_push: bool,
    ) -> oneshot::Receiver<Result<Vec<Remote>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let remote = if let Some(branch_name) = branch_name {
                        if is_push {
                            backend.get_push_remote(branch_name).await?
                        } else {
                            backend.get_branch_remote(branch_name).await?
                        }
                    } else {
                        None
                    };

                    match remote {
                        Some(remote) => Ok(vec![remote]),
                        None => backend.get_all_remotes().await,
                    }
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GetRemotes {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                            is_push,
                        })
                        .await?;

                    let remotes = response
                        .remotes
                        .into_iter()
                        .map(|remotes| Remote {
                            name: remotes.name.into(),
                        })
                        .collect();

                    Ok(remotes)
                }
            }
        })
    }
5927
    /// Lists all branches of the repository, locally or via a
    /// `GitGetBranches` request to the remote host.
    pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.branches().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetBranches {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|branch| proto_to_branch(&branch))
                        .collect();

                    Ok(branches)
                }
            }
        })
    }
5954
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree's working directory differs from the original
        // repository's path; for the main checkout they are equal.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5966
5967 pub fn path_for_new_linked_worktree(
5968 &self,
5969 branch_name: &str,
5970 worktree_directory_setting: &str,
5971 ) -> Result<PathBuf> {
5972 let original_repo = self.original_repo_abs_path.clone();
5973 let project_name = original_repo
5974 .file_name()
5975 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5976 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5977 Ok(directory.join(branch_name).join(project_name))
5978 }
5979
    /// Lists the repository's git worktrees, locally or via a
    /// `GitGetWorktrees` request to the remote host.
    pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.worktrees().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetWorktrees {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let worktrees = response
                        .worktrees
                        .into_iter()
                        .map(|worktree| proto_to_worktree(&worktree))
                        .collect();

                    Ok(worktrees)
                }
            }
        })
    }
6006
    /// Creates a linked worktree at `path`, either detached or on a new
    /// branch, optionally starting from `commit`.
    ///
    /// Fails fast (without enqueuing a job) when a branched start point has
    /// an empty branch name.
    fn create_worktree_with_start_point(
        &mut self,
        start_point: CreateWorktreeStartPoint,
        path: PathBuf,
        commit: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        if matches!(
            &start_point,
            CreateWorktreeStartPoint::Branched { name } if name.is_empty()
        ) {
            // Deliver the validation error through the same channel type the
            // caller expects from a real job.
            let (sender, receiver) = oneshot::channel();
            sender
                .send(Err(anyhow!("branch name cannot be empty")))
                .ok();
            return receiver;
        }

        let id = self.id;
        let message = match &start_point {
            CreateWorktreeStartPoint::Detached => "git worktree add (detached)".into(),
            CreateWorktreeStartPoint::Branched { name } => {
                format!("git worktree add: {name}").into()
            }
        };

        self.send_job(Some(message), move |repo, _cx| async move {
            let branch_name = match start_point {
                CreateWorktreeStartPoint::Detached => None,
                CreateWorktreeStartPoint::Branched { name } => Some(name),
            };
            // The RPC's `name` field is non-optional, so a detached worktree
            // is represented by an empty string over the wire.
            let remote_name = branch_name.clone().unwrap_or_default();

            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_worktree(branch_name, path, commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateWorktree {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            name: remote_name,
                            directory: path.to_string_lossy().to_string(),
                            commit,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6059
6060 pub fn create_worktree(
6061 &mut self,
6062 branch_name: String,
6063 path: PathBuf,
6064 commit: Option<String>,
6065 ) -> oneshot::Receiver<Result<()>> {
6066 self.create_worktree_with_start_point(
6067 CreateWorktreeStartPoint::Branched { name: branch_name },
6068 path,
6069 commit,
6070 )
6071 }
6072
6073 pub fn create_worktree_detached(
6074 &mut self,
6075 path: PathBuf,
6076 commit: String,
6077 ) -> oneshot::Receiver<Result<()>> {
6078 self.create_worktree_with_start_point(
6079 CreateWorktreeStartPoint::Detached,
6080 path,
6081 Some(commit),
6082 )
6083 }
6084
    /// Returns the SHA of the repository's HEAD commit, or `None` when the
    /// backend reports no HEAD (e.g. an empty repository), locally or via a
    /// `GitGetHeadSha` request to the remote host.
    pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    Ok(backend.head_sha().await)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetHeadSha {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    Ok(response.sha)
                }
            }
        })
    }
6105
    /// Points `ref_name` at `commit` in the local repository. Not supported
    /// for remote repositories.
    pub fn update_ref(
        &mut self,
        ref_name: String,
        commit: String,
    ) -> oneshot::Receiver<Result<()>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.update_ref(ref_name, commit).await
                }
                RepositoryState::Remote(_) => {
                    anyhow::bail!("update_ref is not supported for remote repositories")
                }
            }
        })
    }
6122
    /// Deletes `ref_name` from the local repository. Not supported for
    /// remote repositories.
    pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.delete_ref(ref_name).await
                }
                RepositoryState::Remote(_) => {
                    anyhow::bail!("delete_ref is not supported for remote repositories")
                }
            }
        })
    }
6135
    /// Returns whether `sha` resolves to an object in the local repository.
    /// Not supported for remote repositories.
    ///
    /// NOTE(review): functionally identical to `commit_exists` below —
    /// candidates for consolidation.
    pub fn resolve_commit(&mut self, sha: String) -> oneshot::Receiver<Result<bool>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let results = backend.revparse_batch(vec![sha]).await?;
                    Ok(results.into_iter().next().flatten().is_some())
                }
                RepositoryState::Remote(_) => {
                    anyhow::bail!("resolve_commit is not supported for remote repositories")
                }
            }
        })
    }
6149
    /// Repairs the repository's worktree administrative files (analogous to
    /// `git worktree repair`). Not supported for remote repositories.
    pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.repair_worktrees().await
                }
                RepositoryState::Remote(_) => {
                    anyhow::bail!("repair_worktrees is not supported for remote repositories")
                }
            }
        })
    }
6162
6163 pub fn commit_exists(&mut self, sha: String) -> oneshot::Receiver<Result<bool>> {
6164 self.send_job(None, move |repo, _cx| async move {
6165 match repo {
6166 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6167 let results = backend.revparse_batch(vec![sha]).await?;
6168 Ok(results.into_iter().next().flatten().is_some())
6169 }
6170 RepositoryState::Remote(_) => {
6171 anyhow::bail!("commit_exists is not supported for remote repositories")
6172 }
6173 }
6174 })
6175 }
6176
    /// Removes the linked worktree at `path` (forcing removal when `force`
    /// is set), locally or via a `GitRemoveWorktree` request to the remote
    /// host.
    pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree remove: {}", path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.remove_worktree(path, force).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRemoveWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_string_lossy().to_string(),
                                force,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6202
    /// Moves the linked worktree at `old_path` to `new_path`, locally or via
    /// a `GitRenameWorktree` request to the remote host.
    pub fn rename_worktree(
        &mut self,
        old_path: PathBuf,
        new_path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree move: {}", old_path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_worktree(old_path, new_path).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                old_path: old_path.to_string_lossy().to_string(),
                                new_path: new_path.to_string_lossy().to_string(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6232
    /// Returns the repository's default branch name, optionally prefixed
    /// with its remote name, locally or via a `GetDefaultBranch` request to
    /// the remote host. `None` when no default branch is configured.
    pub fn default_branch(
        &mut self,
        include_remote_name: bool,
    ) -> oneshot::Receiver<Result<Option<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.default_branch(include_remote_name).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GetDefaultBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    anyhow::Ok(response.branch.map(SharedString::from))
                }
            }
        })
    }
6256
    /// Computes a tree-level diff (per-path added/modified/deleted statuses)
    /// for `diff_type`, locally or via a `GetTreeDiff` request to the remote
    /// host. On the remote path, entries with missing or unparsable oids are
    /// logged and dropped rather than failing the whole diff.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        // Modified/Deleted carry the old
                                        // blob's oid; skip malformed entries.
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6316
    /// Produces a textual diff for `diff_type` (head-to-index,
    /// head-to-worktree, or against a merge base), locally or via a
    /// `GitDiff` request to the remote host.
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Translate the diff type to its proto form; only the
                    // merge-base variant carries an extra ref.
                    let (proto_diff_type, merge_base_ref) = match &diff_type {
                        DiffType::HeadToIndex => {
                            (proto::git_diff::DiffType::HeadToIndex.into(), None)
                        }
                        DiffType::HeadToWorktree => {
                            (proto::git_diff::DiffType::HeadToWorktree.into(), None)
                        }
                        DiffType::MergeBase { base_ref } => (
                            proto::git_diff::DiffType::MergeBase.into(),
                            Some(base_ref.to_string()),
                        ),
                    };
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            diff_type: proto_diff_type,
                            merge_base_ref,
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
6351
    /// Creates (and switches to) a new branch named `branch_name`,
    /// optionally based on `base_branch`, locally or via a `GitCreateBranch`
    /// request to the remote host.
    ///
    /// NOTE(review): the remote request does not forward `base_branch` —
    /// presumably the proto message has no field for it; confirm whether the
    /// host always branches from HEAD.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6382
    /// Switches the repository to `branch_name`, locally or via a
    /// `GitChangeBranch` request to the remote host.
    pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git switch {branch_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.change_branch(branch_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitChangeBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                branch_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6407
    /// Deletes `branch_name` (a remote-tracking branch when `is_remote`),
    /// locally or via a `GitDeleteBranch` request to the remote host.
    pub fn delete_branch(
        &mut self,
        is_remote: bool,
        branch_name: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(
                format!(
                    "git branch {} {}",
                    if is_remote { "-dr" } else { "-d" },
                    branch_name
                )
                .into(),
            ),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(state) => {
                        state.backend.delete_branch(is_remote, branch_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitDeleteBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                is_remote,
                                branch_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6444
    /// Renames `branch` to `new_name`, locally or via a `GitRenameBranch`
    /// request to the remote host.
    pub fn rename_branch(
        &mut self,
        branch: String,
        new_name: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git branch -m {branch} {new_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_branch(branch, new_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                branch,
                                new_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6474
6475 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6476 let id = self.id;
6477 self.send_job(None, move |repo, _cx| async move {
6478 match repo {
6479 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6480 backend.check_for_pushed_commit().await
6481 }
6482 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6483 let response = client
6484 .request(proto::CheckForPushedCommits {
6485 project_id: project_id.0,
6486 repository_id: id.to_proto(),
6487 })
6488 .await?;
6489
6490 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6491
6492 Ok(branches)
6493 }
6494 }
6495 })
6496 }
6497
6498 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6499 let id = self.id;
6500 self.send_job(None, move |repo, _cx| async move {
6501 match repo {
6502 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6503 backend.checkpoint().await
6504 }
6505 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6506 let response = client
6507 .request(proto::GitCreateCheckpoint {
6508 project_id: project_id.0,
6509 repository_id: id.to_proto(),
6510 })
6511 .await?;
6512
6513 Ok(GitRepositoryCheckpoint {
6514 commit_sha: Oid::from_bytes(&response.commit_sha)?,
6515 })
6516 }
6517 }
6518 })
6519 }
6520
6521 pub fn restore_checkpoint(
6522 &mut self,
6523 checkpoint: GitRepositoryCheckpoint,
6524 ) -> oneshot::Receiver<Result<()>> {
6525 let id = self.id;
6526 self.send_job(None, move |repo, _cx| async move {
6527 match repo {
6528 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6529 backend.restore_checkpoint(checkpoint).await
6530 }
6531 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6532 client
6533 .request(proto::GitRestoreCheckpoint {
6534 project_id: project_id.0,
6535 repository_id: id.to_proto(),
6536 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6537 })
6538 .await?;
6539 Ok(())
6540 }
6541 }
6542 })
6543 }
6544
    /// Applies a repository-state update received from the upstream (host)
    /// project to this downstream replica's snapshot, emitting the matching
    /// `RepositoryEvent`s for each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Compare against the incoming branch/head before overwriting so we
        // only emit `HeadChanged` on a real change.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to deserialize are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Build one edit list combining removals and upserts; entries whose
        // paths/statuses fail to deserialize are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only adopt the upstream scan id once the multi-message update is
        // complete.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6628
6629 pub fn compare_checkpoints(
6630 &mut self,
6631 left: GitRepositoryCheckpoint,
6632 right: GitRepositoryCheckpoint,
6633 ) -> oneshot::Receiver<Result<bool>> {
6634 let id = self.id;
6635 self.send_job(None, move |repo, _cx| async move {
6636 match repo {
6637 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6638 backend.compare_checkpoints(left, right).await
6639 }
6640 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6641 let response = client
6642 .request(proto::GitCompareCheckpoints {
6643 project_id: project_id.0,
6644 repository_id: id.to_proto(),
6645 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6646 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6647 })
6648 .await?;
6649 Ok(response.equal)
6650 }
6651 }
6652 })
6653 }
6654
6655 pub fn diff_checkpoints(
6656 &mut self,
6657 base_checkpoint: GitRepositoryCheckpoint,
6658 target_checkpoint: GitRepositoryCheckpoint,
6659 ) -> oneshot::Receiver<Result<String>> {
6660 let id = self.id;
6661 self.send_job(None, move |repo, _cx| async move {
6662 match repo {
6663 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6664 backend
6665 .diff_checkpoints(base_checkpoint, target_checkpoint)
6666 .await
6667 }
6668 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6669 let response = client
6670 .request(proto::GitDiffCheckpoints {
6671 project_id: project_id.0,
6672 repository_id: id.to_proto(),
6673 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
6674 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
6675 })
6676 .await?;
6677 Ok(response.diff)
6678 }
6679 }
6680 })
6681 }
6682
    /// Rebuilds `pending_ops`, retaining only the ops that are still running
    /// and dropping per-path groups that become empty. Emits
    /// `PendingOpsChanged` when anything was removed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event payload is the *pre-filter* op set
            // (`self.pending_ops` is cloned before the assignment below) —
            // confirm subscribers expect the old set rather than `updated`.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6708
    /// Schedules a full git state rescan on the job queue, optionally
    /// forwarding the resulting snapshot downstream.
    ///
    /// The job is keyed with `GitJobKey::ReloadGitState`, so the worker loop
    /// skips it if a newer scan request is already queued behind it.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been dropped while this job
                // sat in the queue; in that case there is nothing to do.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans can only be computed against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                // Best-effort forwarding; a closed channel is not an error.
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6740
    /// Spawns the background task that executes git jobs for a local
    /// repository, returning the channel used to enqueue jobs.
    ///
    /// The worker waits for the backend to finish opening, registers any
    /// backend-derived git hosting providers, then loops over queued jobs.
    /// A job carrying a `GitJobKey` is skipped when a later job with the same
    /// key is already waiting in the queue, so only the most recent keyed
    /// request actually runs.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued before running the next
                // job, so keyed jobs can see the jobs queued behind them.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job when a newer job with the same key is
                    // already waiting; the newer one supersedes it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6786
    /// Spawns the background task that executes git jobs against a remote
    /// (collaborator-hosted) repository, returning the job channel.
    ///
    /// Same queue discipline as `spawn_local_git_worker`: queued jobs are
    /// drained before each run, and a keyed job is skipped when a newer job
    /// with the same key is already waiting.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so keyed jobs can see the
                // jobs queued behind them.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // A newer job with the same key supersedes this one.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6822
6823 fn load_staged_text(
6824 &mut self,
6825 buffer_id: BufferId,
6826 repo_path: RepoPath,
6827 cx: &App,
6828 ) -> Task<Result<Option<String>>> {
6829 let rx = self.send_job(None, move |state, _| async move {
6830 match state {
6831 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6832 anyhow::Ok(backend.load_index_text(repo_path).await)
6833 }
6834 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6835 let response = client
6836 .request(proto::OpenUnstagedDiff {
6837 project_id: project_id.to_proto(),
6838 buffer_id: buffer_id.to_proto(),
6839 })
6840 .await?;
6841 Ok(response.staged_text)
6842 }
6843 }
6844 });
6845 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6846 }
6847
6848 fn load_committed_text(
6849 &mut self,
6850 buffer_id: BufferId,
6851 repo_path: RepoPath,
6852 cx: &App,
6853 ) -> Task<Result<DiffBasesChange>> {
6854 let rx = self.send_job(None, move |state, _| async move {
6855 match state {
6856 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6857 let committed_text = backend.load_committed_text(repo_path.clone()).await;
6858 let staged_text = backend.load_index_text(repo_path).await;
6859 let diff_bases_change = if committed_text == staged_text {
6860 DiffBasesChange::SetBoth(committed_text)
6861 } else {
6862 DiffBasesChange::SetEach {
6863 index: staged_text,
6864 head: committed_text,
6865 }
6866 };
6867 anyhow::Ok(diff_bases_change)
6868 }
6869 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6870 use proto::open_uncommitted_diff_response::Mode;
6871
6872 let response = client
6873 .request(proto::OpenUncommittedDiff {
6874 project_id: project_id.to_proto(),
6875 buffer_id: buffer_id.to_proto(),
6876 })
6877 .await?;
6878 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
6879 let bases = match mode {
6880 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
6881 Mode::IndexAndHead => DiffBasesChange::SetEach {
6882 head: response.committed_text,
6883 index: response.staged_text,
6884 },
6885 };
6886 Ok(bases)
6887 }
6888 }
6889 });
6890
6891 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6892 }
6893
6894 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6895 let repository_id = self.snapshot.id;
6896 let rx = self.send_job(None, move |state, _| async move {
6897 match state {
6898 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6899 backend.load_blob_content(oid).await
6900 }
6901 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6902 let response = client
6903 .request(proto::GetBlobContent {
6904 project_id: project_id.to_proto(),
6905 repository_id: repository_id.0,
6906 oid: oid.to_string(),
6907 })
6908 .await?;
6909 Ok(response.content)
6910 }
6911 }
6912 });
6913 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6914 }
6915
    /// Queues a targeted status refresh for `paths`, updating only the status
    /// entries (and diff stats) of the paths that actually changed.
    ///
    /// Multiple calls coalesce: paths accumulate in
    /// `paths_needing_status_update` and the keyed job (`RefreshStatuses`)
    /// lets the worker drop all but the most recently queued refresh.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Grab the accumulated paths, leaving the buffer empty so a
                // concurrent call starts a fresh batch.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        // Without a HEAD commit there is nothing to diff
                        // against, so substitute an empty diff-stat result.
                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Upsert entries whose status or diff stat differs
                        // from the previous snapshot. `statuses.entries` and
                        // the cursor are both path-ordered, so a single
                        // forward-seeking pass suffices.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths that no longer appear in the fresh status
                        // output get their stale entries removed.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    // Best-effort forwarding of the refreshed snapshot to
                    // downstream collaborators.
                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
7028
7029 /// currently running git command and when it started
7030 pub fn current_job(&self) -> Option<JobInfo> {
7031 self.active_jobs.values().next().cloned()
7032 }
7033
7034 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
7035 self.send_job(None, |_, _| async {})
7036 }
7037
    /// Runs `f` while tracking it as a pending op on each of `paths`.
    ///
    /// Before spawning, a `Running` op is recorded per path; once `f`
    /// completes, every recorded op is resolved to `Finished`, `Skipped`
    /// (cancellation), or `Error`, and the resulting `Result` is returned.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A `Canceled` error means the job was superseded rather than
            // failed, so it maps to `Skipped` with an Ok result.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    // The op may have been cleared in the meantime; only
                    // update groups that still exist.
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7077
7078 fn new_pending_ops_for_paths(
7079 &mut self,
7080 paths: Vec<RepoPath>,
7081 git_status: pending_op::GitStatus,
7082 ) -> Vec<(PendingOpId, RepoPath)> {
7083 let mut edits = Vec::with_capacity(paths.len());
7084 let mut ids = Vec::with_capacity(paths.len());
7085 for path in paths {
7086 let mut ops = self
7087 .pending_ops
7088 .get(&PathKey(path.as_ref().clone()), ())
7089 .cloned()
7090 .unwrap_or_else(|| PendingOps::new(&path));
7091 let id = ops.max_id() + 1;
7092 ops.ops.push(PendingOp {
7093 id,
7094 git_status,
7095 job_status: pending_op::JobStatus::Running,
7096 });
7097 edits.push(sum_tree::Edit::Insert(ops));
7098 ids.push((id, path));
7099 }
7100 self.pending_ops.edit(edits, ());
7101 ids
7102 }
7103 pub fn default_remote_url(&self) -> Option<String> {
7104 self.remote_upstream_url
7105 .clone()
7106 .or(self.remote_origin_url.clone())
7107 }
7108}
7109
7110/// If `path` is a git linked worktree checkout, resolves it to the main
7111/// repository's working directory path. Returns `None` if `path` is a normal
7112/// repository, not a git repo, or if resolution fails.
7113///
7114/// Resolution works by:
7115/// 1. Reading the `.git` file to get the `gitdir:` pointer
7116/// 2. Following that to the worktree-specific git directory
7117/// 3. Reading the `commondir` file to find the shared `.git` directory
7118/// 4. Deriving the main repo's working directory from the common dir
7119pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7120 let dot_git = path.join(".git");
7121 let metadata = fs.metadata(&dot_git).await.ok()??;
7122 if metadata.is_dir {
7123 return None; // Normal repo, not a linked worktree
7124 }
7125 // It's a .git file — parse the gitdir: pointer
7126 let content = fs.load(&dot_git).await.ok()?;
7127 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7128 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7129 // Read commondir to find the main .git directory
7130 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7131 let common_dir = fs
7132 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7133 .await
7134 .ok()?;
7135 Some(git::repository::original_repo_path_from_common_dir(
7136 &common_dir,
7137 ))
7138}
7139
7140/// Validates that the resolved worktree directory is acceptable:
7141/// - The setting must not be an absolute path.
7142/// - The resolved path must be either a subdirectory of the working
7143/// directory or a subdirectory of its parent (i.e., a sibling).
7144///
7145/// Returns `Ok(resolved_path)` or an error with a user-facing message.
7146pub fn worktrees_directory_for_repo(
7147 original_repo_abs_path: &Path,
7148 worktree_directory_setting: &str,
7149) -> Result<PathBuf> {
7150 // Check the original setting before trimming, since a path like "///"
7151 // is absolute but becomes "" after stripping trailing separators.
7152 // Also check for leading `/` or `\` explicitly, because on Windows
7153 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
7154 // would slip through even though it's clearly not a relative path.
7155 if Path::new(worktree_directory_setting).is_absolute()
7156 || worktree_directory_setting.starts_with('/')
7157 || worktree_directory_setting.starts_with('\\')
7158 {
7159 anyhow::bail!(
7160 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
7161 );
7162 }
7163
7164 if worktree_directory_setting.is_empty() {
7165 anyhow::bail!("git.worktree_directory must not be empty");
7166 }
7167
7168 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
7169 if trimmed == ".." {
7170 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
7171 }
7172
7173 let joined = original_repo_abs_path.join(trimmed);
7174 let resolved = util::normalize_path(&joined);
7175 let resolved = if resolved.starts_with(original_repo_abs_path) {
7176 resolved
7177 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
7178 resolved.join(repo_dir_name)
7179 } else {
7180 resolved
7181 };
7182
7183 let parent = original_repo_abs_path
7184 .parent()
7185 .unwrap_or(original_repo_abs_path);
7186
7187 if !resolved.starts_with(parent) {
7188 anyhow::bail!(
7189 "git.worktree_directory resolved to {resolved:?}, which is outside \
7190 the project root and its parent directory. It must resolve to a \
7191 subdirectory of {original_repo_abs_path:?} or a sibling of it."
7192 );
7193 }
7194
7195 Ok(resolved)
7196}
7197
7198/// Returns a short name for a linked worktree suitable for UI display
7199///
7200/// Uses the main worktree path to come up with a short name that disambiguates
7201/// the linked worktree from the main worktree.
7202pub fn linked_worktree_short_name(
7203 main_worktree_path: &Path,
7204 linked_worktree_path: &Path,
7205) -> Option<SharedString> {
7206 if main_worktree_path == linked_worktree_path {
7207 return None;
7208 }
7209
7210 let project_name = main_worktree_path.file_name()?.to_str()?;
7211 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7212 let name = if directory_name != project_name {
7213 directory_name.to_string()
7214 } else {
7215 linked_worktree_path
7216 .parent()?
7217 .file_name()?
7218 .to_str()?
7219 .to_string()
7220 };
7221 Some(name.into())
7222}
7223
/// Builds a permalink for a file inside an extracted crates.io registry
/// source, by locating the crate's `.cargo_vcs_info.json` (which records the
/// commit the package was built from) and the `repository` field of its
/// `Cargo.toml`, then delegating URL construction to the matching git hosting
/// provider.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal mirrors of the JSON/TOML structures we need fields from.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file (skipping the file itself) to find the crate
    // root, identified by its `.cargo_vcs_info.json`.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Translate the file's path within the package into its path within the
    // source repository.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7274
7275fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7276 let Some(blame) = blame else {
7277 return proto::BlameBufferResponse {
7278 blame_response: None,
7279 };
7280 };
7281
7282 let entries = blame
7283 .entries
7284 .into_iter()
7285 .map(|entry| proto::BlameEntry {
7286 sha: entry.sha.as_bytes().into(),
7287 start_line: entry.range.start,
7288 end_line: entry.range.end,
7289 original_line_number: entry.original_line_number,
7290 author: entry.author,
7291 author_mail: entry.author_mail,
7292 author_time: entry.author_time,
7293 author_tz: entry.author_tz,
7294 committer: entry.committer_name,
7295 committer_mail: entry.committer_email,
7296 committer_time: entry.committer_time,
7297 committer_tz: entry.committer_tz,
7298 summary: entry.summary,
7299 previous: entry.previous,
7300 filename: entry.filename,
7301 })
7302 .collect::<Vec<_>>();
7303
7304 let messages = blame
7305 .messages
7306 .into_iter()
7307 .map(|(oid, message)| proto::CommitMessage {
7308 oid: oid.as_bytes().into(),
7309 message,
7310 })
7311 .collect::<Vec<_>>();
7312
7313 proto::BlameBufferResponse {
7314 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7315 }
7316}
7317
7318fn deserialize_blame_buffer_response(
7319 response: proto::BlameBufferResponse,
7320) -> Option<git::blame::Blame> {
7321 let response = response.blame_response?;
7322 let entries = response
7323 .entries
7324 .into_iter()
7325 .filter_map(|entry| {
7326 Some(git::blame::BlameEntry {
7327 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7328 range: entry.start_line..entry.end_line,
7329 original_line_number: entry.original_line_number,
7330 committer_name: entry.committer,
7331 committer_time: entry.committer_time,
7332 committer_tz: entry.committer_tz,
7333 committer_email: entry.committer_mail,
7334 author: entry.author,
7335 author_mail: entry.author_mail,
7336 author_time: entry.author_time,
7337 author_tz: entry.author_tz,
7338 summary: entry.summary,
7339 previous: entry.previous,
7340 filename: entry.filename,
7341 })
7342 })
7343 .collect::<Vec<_>>();
7344
7345 let messages = response
7346 .messages
7347 .into_iter()
7348 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7349 .collect::<HashMap<_, _>>();
7350
7351 Some(Blame { entries, messages })
7352}
7353
7354fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7355 proto::Branch {
7356 is_head: branch.is_head,
7357 ref_name: branch.ref_name.to_string(),
7358 unix_timestamp: branch
7359 .most_recent_commit
7360 .as_ref()
7361 .map(|commit| commit.commit_timestamp as u64),
7362 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7363 ref_name: upstream.ref_name.to_string(),
7364 tracking: upstream
7365 .tracking
7366 .status()
7367 .map(|upstream| proto::UpstreamTracking {
7368 ahead: upstream.ahead as u64,
7369 behind: upstream.behind as u64,
7370 }),
7371 }),
7372 most_recent_commit: branch
7373 .most_recent_commit
7374 .as_ref()
7375 .map(|commit| proto::CommitSummary {
7376 sha: commit.sha.to_string(),
7377 subject: commit.subject.to_string(),
7378 commit_timestamp: commit.commit_timestamp,
7379 author_name: commit.author_name.to_string(),
7380 }),
7381 }
7382}
7383
7384fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7385 proto::Worktree {
7386 path: worktree.path.to_string_lossy().to_string(),
7387 ref_name: worktree
7388 .ref_name
7389 .as_ref()
7390 .map(|s| s.to_string())
7391 .unwrap_or_default(),
7392 sha: worktree.sha.to_string(),
7393 is_main: worktree.is_main,
7394 }
7395}
7396
7397fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7398 git::repository::Worktree {
7399 path: PathBuf::from(proto.path.clone()),
7400 ref_name: Some(SharedString::from(&proto.ref_name)),
7401 sha: proto.sha.clone().into(),
7402 is_main: proto.is_main,
7403 }
7404}
7405
7406fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7407 git::repository::Branch {
7408 is_head: proto.is_head,
7409 ref_name: proto.ref_name.clone().into(),
7410 upstream: proto
7411 .upstream
7412 .as_ref()
7413 .map(|upstream| git::repository::Upstream {
7414 ref_name: upstream.ref_name.to_string().into(),
7415 tracking: upstream
7416 .tracking
7417 .as_ref()
7418 .map(|tracking| {
7419 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7420 ahead: tracking.ahead as u32,
7421 behind: tracking.behind as u32,
7422 })
7423 })
7424 .unwrap_or(git::repository::UpstreamTracking::Gone),
7425 }),
7426 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7427 git::repository::CommitSummary {
7428 sha: commit.sha.to_string().into(),
7429 subject: commit.subject.to_string().into(),
7430 commit_timestamp: commit.commit_timestamp,
7431 author_name: commit.author_name.to_string().into(),
7432 has_parent: true,
7433 }
7434 }),
7435 }
7436}
7437
7438fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7439 proto::GitCommitDetails {
7440 sha: commit.sha.to_string(),
7441 message: commit.message.to_string(),
7442 commit_timestamp: commit.commit_timestamp,
7443 author_email: commit.author_email.to_string(),
7444 author_name: commit.author_name.to_string(),
7445 }
7446}
7447
7448fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7449 CommitDetails {
7450 sha: proto.sha.clone().into(),
7451 message: proto.message.clone().into(),
7452 commit_timestamp: proto.commit_timestamp,
7453 author_email: proto.author_email.clone().into(),
7454 author_name: proto.author_name.clone().into(),
7455 }
7456}
7457
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
///
/// Returns the final snapshot after both update phases have been published to
/// the `Repository` entity. Errors from the underlying git backend (branch
/// listing, status, diff-stat, stash, merge-detail update) are propagated.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Capture the id, work directory, and previous snapshot up front. Any
    // paths queued for a targeted status refresh are superseded by this full
    // scan, so clear them.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve the HEAD commit, if any. A failure to `show` the resolved sha is
    // logged (`log_err`) and treated as "no head commit" rather than failing
    // the whole snapshot.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Phase 1, on the background executor: run the cheap "shape of the repo"
    // queries concurrently — branch list, head commit, and git worktrees.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    // The current branch is whichever entry the backend marked as HEAD.
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // The backend reports every worktree, including the primary one; keep only
    // the linked (secondary) worktrees.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    // Fetch both conventional remote URLs concurrently; each side is an
    // Option, so a missing remote is not an error.
    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First snapshot publication: branch/head/remote/worktree state goes out
    // eagerly (with change events) so the UI can react before the slower file
    // status scan below completes. File state is carried over from the
    // previous snapshot via struct update syntax.
    let snapshot = this.update(cx, |this, cx| {
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Phase 2, back on the background executor: file statuses for the whole
    // repository, per-file diff stats (skipped when there is no HEAD commit to
    // diff against), and the stash list.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        // No HEAD (e.g. empty repository): there is nothing to
                        // diff against, so report empty stats.
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Join each status entry with its diff stat and collect conflicted paths
    // while building the status SumTree.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Refresh merge details against the backend using the conflicted paths we
    // just collected; `update` reports whether the conflict set changed.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Second snapshot publication: file state (statuses, merge details, stash)
    // plus the corresponding change events. Note the scan_id is bumped again,
    // once per publication phase.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7627
7628fn status_from_proto(
7629 simple_status: i32,
7630 status: Option<proto::GitFileStatus>,
7631) -> anyhow::Result<FileStatus> {
7632 use proto::git_file_status::Variant;
7633
7634 let Some(variant) = status.and_then(|status| status.variant) else {
7635 let code = proto::GitStatus::from_i32(simple_status)
7636 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7637 let result = match code {
7638 proto::GitStatus::Added => TrackedStatus {
7639 worktree_status: StatusCode::Added,
7640 index_status: StatusCode::Unmodified,
7641 }
7642 .into(),
7643 proto::GitStatus::Modified => TrackedStatus {
7644 worktree_status: StatusCode::Modified,
7645 index_status: StatusCode::Unmodified,
7646 }
7647 .into(),
7648 proto::GitStatus::Conflict => UnmergedStatus {
7649 first_head: UnmergedStatusCode::Updated,
7650 second_head: UnmergedStatusCode::Updated,
7651 }
7652 .into(),
7653 proto::GitStatus::Deleted => TrackedStatus {
7654 worktree_status: StatusCode::Deleted,
7655 index_status: StatusCode::Unmodified,
7656 }
7657 .into(),
7658 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7659 };
7660 return Ok(result);
7661 };
7662
7663 let result = match variant {
7664 Variant::Untracked(_) => FileStatus::Untracked,
7665 Variant::Ignored(_) => FileStatus::Ignored,
7666 Variant::Unmerged(unmerged) => {
7667 let [first_head, second_head] =
7668 [unmerged.first_head, unmerged.second_head].map(|head| {
7669 let code = proto::GitStatus::from_i32(head)
7670 .with_context(|| format!("Invalid git status code: {head}"))?;
7671 let result = match code {
7672 proto::GitStatus::Added => UnmergedStatusCode::Added,
7673 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7674 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7675 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7676 };
7677 Ok(result)
7678 });
7679 let [first_head, second_head] = [first_head?, second_head?];
7680 UnmergedStatus {
7681 first_head,
7682 second_head,
7683 }
7684 .into()
7685 }
7686 Variant::Tracked(tracked) => {
7687 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7688 .map(|status| {
7689 let code = proto::GitStatus::from_i32(status)
7690 .with_context(|| format!("Invalid git status code: {status}"))?;
7691 let result = match code {
7692 proto::GitStatus::Modified => StatusCode::Modified,
7693 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7694 proto::GitStatus::Added => StatusCode::Added,
7695 proto::GitStatus::Deleted => StatusCode::Deleted,
7696 proto::GitStatus::Renamed => StatusCode::Renamed,
7697 proto::GitStatus::Copied => StatusCode::Copied,
7698 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7699 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7700 };
7701 Ok(result)
7702 });
7703 let [index_status, worktree_status] = [index_status?, worktree_status?];
7704 TrackedStatus {
7705 index_status,
7706 worktree_status,
7707 }
7708 .into()
7709 }
7710 };
7711 Ok(result)
7712}
7713
7714fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7715 use proto::git_file_status::{Tracked, Unmerged, Variant};
7716
7717 let variant = match status {
7718 FileStatus::Untracked => Variant::Untracked(Default::default()),
7719 FileStatus::Ignored => Variant::Ignored(Default::default()),
7720 FileStatus::Unmerged(UnmergedStatus {
7721 first_head,
7722 second_head,
7723 }) => Variant::Unmerged(Unmerged {
7724 first_head: unmerged_status_to_proto(first_head),
7725 second_head: unmerged_status_to_proto(second_head),
7726 }),
7727 FileStatus::Tracked(TrackedStatus {
7728 index_status,
7729 worktree_status,
7730 }) => Variant::Tracked(Tracked {
7731 index_status: tracked_status_to_proto(index_status),
7732 worktree_status: tracked_status_to_proto(worktree_status),
7733 }),
7734 };
7735 proto::GitFileStatus {
7736 variant: Some(variant),
7737 }
7738}
7739
7740fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7741 match code {
7742 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7743 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7744 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7745 }
7746}
7747
7748fn tracked_status_to_proto(code: StatusCode) -> i32 {
7749 match code {
7750 StatusCode::Added => proto::GitStatus::Added as _,
7751 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7752 StatusCode::Modified => proto::GitStatus::Modified as _,
7753 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7754 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7755 StatusCode::Copied => proto::GitStatus::Copied as _,
7756 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7757 }
7758}