1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, CreateWorktreeTarget,
36 DiffType, FetchOptions, GitRepository, GitRepositoryCheckpoint, GraphCommitData,
37 InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote, RemoteCommandOutput,
38 RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for git state in a project: tracks every repository found
/// in the project's worktrees, the per-buffer diffs derived from them, and
/// the currently active repository selection.
pub struct GitStore {
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // NOTE(review): presumably the worktrees that contributed each repository
    // — confirm against the maintenance sites of this map.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    active_repo_id: Option<RepositoryId>,
    #[allow(clippy::type_complexity)]
    // In-flight diff loads, `Shared` so concurrent requests for the same
    // (buffer, kind) pair await a single task.
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diff entities shared with remote collaborators, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// The diff entities that have been shared with a remote peer for one buffer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: the diffs derived for a buffer, its conflict set,
/// the base texts those diffs were computed against, and bookkeeping for
/// in-flight recalculation work.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against arbitrary commits; the `None` key holds a diff with no
    // base text (see `open_diff_since`).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // NOTE(review): presumably resolved when a conflict-marker reparse
    // finishes — confirm in `reparse_conflict_markers`.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    // Cached base texts for oid diffs, keyed by commit oid.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed and their new
/// values; `None` means the corresponding base is absent.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index text changed.
    SetIndex(Option<String>),
    /// Only the HEAD text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to (potentially) different texts.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same text.
    SetBoth(Option<String>),
}
155
/// Which base a buffer diff is computed against.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Diff against the index (staged text).
    Unstaged,
    /// Diff against HEAD (committed text).
    Uncommitted,
    /// Diff against a specific commit, or no base when `None`.
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store owns its repositories (local) or mirrors them from an
/// upstream project (remote), plus the respective sharing state.
enum GitStoreState {
    Local {
        // Source of fresh `RepositoryId`s for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        // Present while this project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        // Present while this (already remote) project is re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// Messages sent to the background task that relays repository state to a
/// downstream client (see `GitStore::shared`).
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
181
/// Sharing state for a local store: the connected client plus the channel
/// and task that stream repository updates to it.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    // Keeps the relay task alive for as long as the project stays shared.
    _task: Task<Result<()>>,
}
188
/// A point-in-time checkpoint of every repository in the store, keyed by
/// each repository's working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// One path's git status within a repository, with optional added/deleted
/// line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    // Summarize an entry by its path (for ordering) and its status (for
    // aggregated git summaries over subtrees).
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}
254
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    // Entries are keyed by their repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Stable identifier for a repository within a `GitStore`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
265
/// State of an in-progress merge: the merge heads recorded per conflicted
/// path, and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}
271
/// Cache entry for a commit's graph data: either still loading or loaded.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// Immutable view of a repository's state as of a particular scan: statuses,
/// branch info, merge state, remotes, stash, and linked git worktrees.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    // NOTE(review): presumably bumped per completed status scan — confirm
    // against the scanning code.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    pub linked_worktrees: Arc<[GitWorktree]>,
}
299
/// Identifier for a job queued on a repository's job channel.
type JobId = u64;
301
/// Metadata about a running git job: when it started and a human-readable
/// description of what it is doing.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
307
/// Handle to the background task that loads full commit data on demand.
struct GraphCommitDataHandler {
    // Keeps the loader task alive.
    _task: Task<()>,
    // Requests for a specific commit's data are sent over this channel.
    commit_data_request: smol::channel::Sender<Oid>,
}
312
/// Lifecycle of the commit-data loader task.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
318
/// Incrementally fetched commit-graph data for one (source, order) log view.
pub struct InitialGitGraphData {
    // Keeps the fetch running; dropping it cancels the fetch.
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    // Maps a commit oid to its index within `commit_data`.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
325
/// Borrowed view of graph data handed to callers: the commits fetched so
/// far, whether fetching is still in progress, and any error encountered.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
331
/// Live state for one git repository: the latest snapshot plus job queues,
/// in-flight operations, and cached commit-graph data.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Queue of git work for this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Next job id to hand out.
    job_id: JobId,
    // Askpass delegates for in-flight remote operations, keyed by request id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
351
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    // Allows reading snapshot fields (status, branch, etc.) directly off a
    // `Repository`.
    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
359
/// Everything needed to run git operations for a repository on this machine.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    // Shell environment resolved for the working directory (see `new`);
    // NOTE(review): presumably passed to spawned git processes — confirm.
    pub environment: Arc<HashMap<String, String>>,
}
366
impl LocalRepositoryState {
    /// Opens the git repository at `dot_git_abs_path`, first resolving the
    /// working directory's shell environment so the `git` binary lookup can
    /// honor that directory's `PATH`.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        // Resolve the directory environment; fall back to an empty
        // environment (with a logged error) rather than failing outright.
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found via the directory's PATH, then
                    // fall back to the ambient PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
409
/// Connection details used to proxy git operations to the upstream project.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
415
/// Backend for a repository: direct filesystem access or an RPC proxy.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
421
/// Progress events for an incrementally loading commit graph.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    // Carries the current total number of commits available.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
428
/// Events emitted by a `Repository` when parts of its snapshot change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
439
/// Emitted by a `Repository` when its set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
442
/// Events emitted by the `GitStore` itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    // A write to the git index failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
454
// Event wiring: repositories emit snapshot-change and job-update events; the
// store emits store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
458
/// A unit of work queued on a repository's job channel.
pub struct GitJob {
    // The work itself, run with access to the repository backend.
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): presumably used to coalesce redundant queued jobs of the
    // same kind — confirm in the job loop.
    key: Option<GitJobKey>,
}
463
/// Identifies classes of git jobs (see `GitJob::key`).
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
471
472impl GitStore {
473 pub fn local(
474 worktree_store: &Entity<WorktreeStore>,
475 buffer_store: Entity<BufferStore>,
476 environment: Entity<ProjectEnvironment>,
477 fs: Arc<dyn Fs>,
478 cx: &mut Context<Self>,
479 ) -> Self {
480 Self::new(
481 worktree_store.clone(),
482 buffer_store,
483 GitStoreState::Local {
484 next_repository_id: Arc::new(AtomicU64::new(1)),
485 downstream: None,
486 project_environment: environment,
487 fs,
488 },
489 cx,
490 )
491 }
492
493 pub fn remote(
494 worktree_store: &Entity<WorktreeStore>,
495 buffer_store: Entity<BufferStore>,
496 upstream_client: AnyProtoClient,
497 project_id: u64,
498 cx: &mut Context<Self>,
499 ) -> Self {
500 Self::new(
501 worktree_store.clone(),
502 buffer_store,
503 GitStoreState::Remote {
504 upstream_client,
505 upstream_project_id: project_id,
506 downstream: None,
507 },
508 cx,
509 )
510 }
511
512 fn new(
513 worktree_store: Entity<WorktreeStore>,
514 buffer_store: Entity<BufferStore>,
515 state: GitStoreState,
516 cx: &mut Context<Self>,
517 ) -> Self {
518 let mut _subscriptions = vec![
519 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
520 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
521 ];
522
523 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
524 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
525 }
526
527 GitStore {
528 state,
529 buffer_store,
530 worktree_store,
531 repositories: HashMap::default(),
532 worktree_ids: HashMap::default(),
533 active_repo_id: None,
534 _subscriptions,
535 loading_diffs: HashMap::default(),
536 shared_diffs: HashMap::default(),
537 diffs: HashMap::default(),
538 }
539 }
540
    /// Registers all git-related RPC message handlers on the given client, so
    /// incoming requests are routed to the corresponding `handle_*` methods.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
    }
593
594 pub fn is_local(&self) -> bool {
595 matches!(self.state, GitStoreState::Local { .. })
596 }
597 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
598 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
599 let id = repo.read(cx).id;
600 if self.active_repo_id != Some(id) {
601 self.active_repo_id = Some(id);
602 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
603 }
604 }
605 }
606
    /// Begins sharing this store's repositories with a downstream client,
    /// sending an initial update for every repository and (for local stores)
    /// spawning a task that streams subsequent updates.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // A remote store forwards each repository's full snapshot
                // immediately; there is no local update stream.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // Queue current snapshots, then spawn a background task that
                // diffs successive snapshots and streams incremental updates
                // (or removals) to the client.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send only the delta.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send a full initial update.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The update stream ended (sender dropped or a send
                        // failed): clear the downstream connection.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
687
688 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
689 match &mut self.state {
690 GitStoreState::Local {
691 downstream: downstream_client,
692 ..
693 } => {
694 downstream_client.take();
695 }
696 GitStoreState::Remote {
697 downstream: downstream_client,
698 ..
699 } => {
700 downstream_client.take();
701 }
702 }
703 self.shared_diffs.clear();
704 }
705
    /// Drops all diff entities that were being shared with the given peer.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
709
710 pub fn active_repository(&self) -> Option<Entity<Repository>> {
711 self.active_repo_id
712 .as_ref()
713 .map(|id| self.repositories[id].clone())
714 }
715
716 pub fn open_unstaged_diff(
717 &mut self,
718 buffer: Entity<Buffer>,
719 cx: &mut Context<Self>,
720 ) -> Task<Result<Entity<BufferDiff>>> {
721 let buffer_id = buffer.read(cx).remote_id();
722 if let Some(diff_state) = self.diffs.get(&buffer_id)
723 && let Some(unstaged_diff) = diff_state
724 .read(cx)
725 .unstaged_diff
726 .as_ref()
727 .and_then(|weak| weak.upgrade())
728 {
729 if let Some(task) =
730 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
731 {
732 return cx.background_executor().spawn(async move {
733 task.await;
734 Ok(unstaged_diff)
735 });
736 }
737 return Task::ready(Ok(unstaged_diff));
738 }
739
740 let Some((repo, repo_path)) =
741 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
742 else {
743 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
744 };
745
746 let task = self
747 .loading_diffs
748 .entry((buffer_id, DiffKind::Unstaged))
749 .or_insert_with(|| {
750 let staged_text = repo.update(cx, |repo, cx| {
751 repo.load_staged_text(buffer_id, repo_path, cx)
752 });
753 cx.spawn(async move |this, cx| {
754 Self::open_diff_internal(
755 this,
756 DiffKind::Unstaged,
757 staged_text.await.map(DiffBasesChange::SetIndex),
758 buffer,
759 cx,
760 )
761 .await
762 .map_err(Arc::new)
763 })
764 .shared()
765 })
766 .clone();
767
768 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
769 }
770
    /// Returns a diff of `buffer` against the given commit (or against no
    /// base when `oid` is `None`), creating and caching it on first request.
    /// Concurrent calls for the same (buffer, oid) pair share one task.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff for this oid already exists. If a
        // recalculation is in flight, wait for it first.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load for this (buffer, oid) is already in flight: share it.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text; `None` oid means no base text.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Attach the unstaged diff as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and register the new diff.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
871
872 #[ztracing::instrument(skip_all)]
873 pub fn open_uncommitted_diff(
874 &mut self,
875 buffer: Entity<Buffer>,
876 cx: &mut Context<Self>,
877 ) -> Task<Result<Entity<BufferDiff>>> {
878 let buffer_id = buffer.read(cx).remote_id();
879
880 if let Some(diff_state) = self.diffs.get(&buffer_id)
881 && let Some(uncommitted_diff) = diff_state
882 .read(cx)
883 .uncommitted_diff
884 .as_ref()
885 .and_then(|weak| weak.upgrade())
886 {
887 if let Some(task) =
888 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
889 {
890 return cx.background_executor().spawn(async move {
891 task.await;
892 Ok(uncommitted_diff)
893 });
894 }
895 return Task::ready(Ok(uncommitted_diff));
896 }
897
898 let Some((repo, repo_path)) =
899 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
900 else {
901 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
902 };
903
904 let task = self
905 .loading_diffs
906 .entry((buffer_id, DiffKind::Uncommitted))
907 .or_insert_with(|| {
908 let changes = repo.update(cx, |repo, cx| {
909 repo.load_committed_text(buffer_id, repo_path, cx)
910 });
911
912 // todo(lw): hot foreground spawn
913 cx.spawn(async move |this, cx| {
914 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
915 .await
916 .map_err(Arc::new)
917 })
918 .shared()
919 })
920 .clone();
921
922 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
923 }
924
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`: records
    /// the loaded base text(s) in the buffer's git state, creates the
    /// `BufferDiff` entity, and waits for the initial recalculation.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // On load failure, drop the loading-task entry so a later call can
        // retry, then propagate the error.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff carries an unstaged diff as its
                        // secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                // Resolve once the initial recalculation (if any) completes.
                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
999
1000 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1001 let diff_state = self.diffs.get(&buffer_id)?;
1002 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1003 }
1004
1005 pub fn get_uncommitted_diff(
1006 &self,
1007 buffer_id: BufferId,
1008 cx: &App,
1009 ) -> Option<Entity<BufferDiff>> {
1010 let diff_state = self.diffs.get(&buffer_id)?;
1011 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1012 }
1013
1014 pub fn get_diff_since_oid(
1015 &self,
1016 buffer_id: BufferId,
1017 oid: Option<git::Oid>,
1018 cx: &App,
1019 ) -> Option<Entity<BufferDiff>> {
1020 let diff_state = self.diffs.get(&buffer_id)?;
1021 diff_state.read(cx).oid_diff(oid)
1022 }
1023
1024 pub fn open_conflict_set(
1025 &mut self,
1026 buffer: Entity<Buffer>,
1027 cx: &mut Context<Self>,
1028 ) -> Entity<ConflictSet> {
1029 log::debug!("open conflict set");
1030 let buffer_id = buffer.read(cx).remote_id();
1031
1032 if let Some(git_state) = self.diffs.get(&buffer_id)
1033 && let Some(conflict_set) = git_state
1034 .read(cx)
1035 .conflict_set
1036 .as_ref()
1037 .and_then(|weak| weak.upgrade())
1038 {
1039 let conflict_set = conflict_set;
1040 let buffer_snapshot = buffer.read(cx).text_snapshot();
1041
1042 git_state.update(cx, |state, cx| {
1043 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1044 });
1045
1046 return conflict_set;
1047 }
1048
1049 let is_unmerged = self
1050 .repository_and_path_for_buffer_id(buffer_id, cx)
1051 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1052 let git_store = cx.weak_entity();
1053 let buffer_git_state = self
1054 .diffs
1055 .entry(buffer_id)
1056 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1057 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1058
1059 self._subscriptions
1060 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1061 cx.emit(GitStoreEvent::ConflictsUpdated);
1062 }));
1063
1064 buffer_git_state.update(cx, |state, cx| {
1065 state.conflict_set = Some(conflict_set.downgrade());
1066 let buffer_snapshot = buffer.read(cx).text_snapshot();
1067 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1068 });
1069
1070 conflict_set
1071 }
1072
1073 pub fn project_path_git_status(
1074 &self,
1075 project_path: &ProjectPath,
1076 cx: &App,
1077 ) -> Option<FileStatus> {
1078 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1079 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1080 }
1081
    /// Creates a checkpoint of every repository in the store.
    ///
    /// Kicks off a checkpoint job per repository, then joins the results on
    /// the background executor, pairing each checkpoint with its
    /// repository's working-directory path.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` resolves to a nested result; the `map`
                // closure flattens the inner `Result` with `?` so
                // `try_join_all` below can short-circuit on the first error.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1102
1103 pub fn restore_checkpoint(
1104 &self,
1105 checkpoint: GitStoreCheckpoint,
1106 cx: &mut App,
1107 ) -> Task<Result<()>> {
1108 let repositories_by_work_dir_abs_path = self
1109 .repositories
1110 .values()
1111 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1112 .collect::<HashMap<_, _>>();
1113
1114 let mut tasks = Vec::new();
1115 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1116 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1117 let restore = repository.update(cx, |repository, _| {
1118 repository.restore_checkpoint(checkpoint)
1119 });
1120 tasks.push(async move { restore.await? });
1121 }
1122 }
1123 cx.background_spawn(async move {
1124 future::try_join_all(tasks).await?;
1125 Ok(())
1126 })
1127 }
1128
1129 /// Compares two checkpoints, returning true if they are equal.
1130 pub fn compare_checkpoints(
1131 &self,
1132 left: GitStoreCheckpoint,
1133 mut right: GitStoreCheckpoint,
1134 cx: &mut App,
1135 ) -> Task<Result<bool>> {
1136 let repositories_by_work_dir_abs_path = self
1137 .repositories
1138 .values()
1139 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1140 .collect::<HashMap<_, _>>();
1141
1142 let mut tasks = Vec::new();
1143 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1144 if let Some(right_checkpoint) = right
1145 .checkpoints_by_work_dir_abs_path
1146 .remove(&work_dir_abs_path)
1147 {
1148 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1149 {
1150 let compare = repository.update(cx, |repository, _| {
1151 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1152 });
1153
1154 tasks.push(async move { compare.await? });
1155 }
1156 } else {
1157 return Task::ready(Ok(false));
1158 }
1159 }
1160 cx.background_spawn(async move {
1161 Ok(future::try_join_all(tasks)
1162 .await?
1163 .into_iter()
1164 .all(|result| result))
1165 })
1166 }
1167
    /// Blames a buffer.
    ///
    /// When `version` is given, the blame is computed against the buffer's
    /// content at that version; otherwise the current content is used. Local
    /// repositories run the blame via the git backend; remote repositories
    /// proxy the request to the upstream client.
    ///
    /// Returns an error if the buffer doesn't belong to a git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Capture the content to blame before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle across the await points.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1214
1215 pub fn file_history(
1216 &self,
1217 repo: &Entity<Repository>,
1218 path: RepoPath,
1219 cx: &mut App,
1220 ) -> Task<Result<git::repository::FileHistory>> {
1221 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1222
1223 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1224 }
1225
1226 pub fn file_history_paginated(
1227 &self,
1228 repo: &Entity<Repository>,
1229 path: RepoPath,
1230 skip: usize,
1231 limit: Option<usize>,
1232 cx: &mut App,
1233 ) -> Task<Result<git::repository::FileHistory>> {
1234 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1235
1236 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1237 }
1238
    /// Builds a permalink URL for the given selection range in `buffer`,
    /// based on the containing repository's remote and current HEAD.
    ///
    /// For buffers outside any git repository, falls back to building a
    /// registry permalink for Rust sources opened from the Cargo registry.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        let branch = repo.read(cx).branch.clone();
        // Prefer the branch's configured upstream remote; default to "origin".
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Remote projects delegate permalink construction to
                        // the host, which has the actual repository.
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1323
1324 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1325 match &self.state {
1326 GitStoreState::Local {
1327 downstream: downstream_client,
1328 ..
1329 } => downstream_client
1330 .as_ref()
1331 .map(|state| (state.client.clone(), state.project_id)),
1332 GitStoreState::Remote {
1333 downstream: downstream_client,
1334 ..
1335 } => downstream_client.clone(),
1336 }
1337 }
1338
1339 fn upstream_client(&self) -> Option<AnyProtoClient> {
1340 match &self.state {
1341 GitStoreState::Local { .. } => None,
1342 GitStoreState::Remote {
1343 upstream_client, ..
1344 } => Some(upstream_client.clone()),
1345 }
1346 }
1347
    /// Reacts to worktree-store events on local stores: forwards entry
    /// updates to the owning repositories, registers/updates repositories
    /// discovered in worktrees, and drops repositories whose last worktree
    /// was removed. Remote stores receive repository state via RPC instead,
    /// so this early-returns for them.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Resolve which repositories the changed paths belong to,
                    // then notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees don't get repository entities.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Detach this worktree from every repository, collecting the
                // repositories that are left with no worktrees at all.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream clients the repository is gone.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository went away, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Responds to an update event from one repository: refreshes conflict
    /// tracking for all buffers that live in that repository, then re-emits
    /// the event at the store level (tagged with whether the repository is
    /// the active one).
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Sync the conflict set's unmerged flag with the
                        // repository's latest status for this path.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse markers when the flag actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1493
    /// Re-broadcasts a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1497
    /// Update our list of repositories and schedule git scans in response to
    /// a notification from a worktree.
    ///
    /// Matches each reported repository against existing entries by old or
    /// new working-directory path: matched entries are rescanned (or
    /// detached from the worktree and, if unreferenced, removed), while
    /// previously unknown repositories are created, subscribed to, and
    /// announced downstream.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repository still exists (possibly moved): adopt the
                    // new path and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // No new path: the repository is gone from this worktree.
                    // Remove it entirely once no worktree references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A repository we haven't seen before: create a local entity
                // for it.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Repository trust follows the trust of the worktree it was
                // discovered in (defaulting to untrusted).
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository to appear becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1610
1611 fn on_trusted_worktrees_event(
1612 &mut self,
1613 _: Entity<TrustedWorktreesStore>,
1614 event: &TrustedWorktreesEvent,
1615 cx: &mut Context<Self>,
1616 ) {
1617 if !matches!(self.state, GitStoreState::Local { .. }) {
1618 return;
1619 }
1620
1621 let (is_trusted, event_paths) = match event {
1622 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1623 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1624 };
1625
1626 for (repo_id, worktree_ids) in &self.worktree_ids {
1627 if worktree_ids
1628 .iter()
1629 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1630 {
1631 if let Some(repo) = self.repositories.get(repo_id) {
1632 let repository_state = repo.read(cx).repository_state.clone();
1633 cx.background_spawn(async move {
1634 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1635 state.backend.set_trusted(is_trusted);
1636 }
1637 })
1638 .detach();
1639 }
1640 }
1641 }
1642 }
1643
    /// Keeps per-buffer git state in sync with buffer lifecycle events:
    /// language changes, shared-buffer closes, buffer drops, and file path
    /// changes (which can move a buffer into or out of a repository).
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Propagate language changes to the buffer's diff state so
                // diffs can be re-processed for the new language.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Discard all diff state for the dropped buffer, including
                // any state shared with peers.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Best-effort: failures are logged, not surfaced.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1716
    /// Recomputes diffs and reparses conflict markers for the given buffers,
    /// returning a future that resolves once all recalculations finish.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` returns `None` when there's
                    // nothing in flight; only actual waits are collected.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1742
    /// Handles hunk (un)staging requested through a [`BufferDiff`]: writes
    /// the new index text for the file, and — if the index write fails —
    /// clears the diff's pending hunks and reports the error via
    /// [`GitStoreEvent::IndexWriteError`].
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Count each staging operation so completions can be matched
                // against the operation that triggered them.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // On failure, roll back the optimistic pending-hunk
                        // state and surface the error as a store event.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1782
1783 fn local_worktree_git_repos_changed(
1784 &mut self,
1785 worktree: Entity<Worktree>,
1786 changed_repos: &UpdatedGitRepositoriesSet,
1787 cx: &mut Context<Self>,
1788 ) {
1789 log::debug!("local worktree repos changed");
1790 debug_assert!(worktree.read(cx).is_local());
1791
1792 for repository in self.repositories.values() {
1793 repository.update(cx, |repository, cx| {
1794 let repo_abs_path = &repository.work_directory_abs_path;
1795 if changed_repos.iter().any(|update| {
1796 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1797 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1798 }) {
1799 repository.reload_buffer_diff_bases(cx);
1800 }
1801 });
1802 }
1803 }
1804
    /// All repositories tracked by the store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1808
1809 /// Returns the original (main) repository working directory for the given worktree.
1810 /// For normal checkouts this equals the worktree's own path; for linked
1811 /// worktrees it points back to the original repo.
1812 pub fn original_repo_path_for_worktree(
1813 &self,
1814 worktree_id: WorktreeId,
1815 cx: &App,
1816 ) -> Option<Arc<Path>> {
1817 self.active_repo_id
1818 .iter()
1819 .chain(self.worktree_ids.keys())
1820 .find(|repo_id| {
1821 self.worktree_ids
1822 .get(repo_id)
1823 .is_some_and(|ids| ids.contains(&worktree_id))
1824 })
1825 .and_then(|repo_id| self.repositories.get(repo_id))
1826 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1827 }
1828
1829 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1830 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1831 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1832 Some(status.status)
1833 }
1834
1835 pub fn repository_and_path_for_buffer_id(
1836 &self,
1837 buffer_id: BufferId,
1838 cx: &App,
1839 ) -> Option<(Entity<Repository>, RepoPath)> {
1840 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1841 let project_path = buffer.read(cx).project_path(cx)?;
1842 self.repository_and_path_for_project_path(&project_path, cx)
1843 }
1844
    /// Finds the repository containing `path`, along with the path relative
    /// to that repository's working directory.
    ///
    /// When multiple repositories contain the path (nested checkouts), the
    /// one with the greatest working-directory path wins, which selects the
    /// innermost repository.
    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }
1859
1860 pub fn git_init(
1861 &self,
1862 path: Arc<Path>,
1863 fallback_branch_name: String,
1864 cx: &App,
1865 ) -> Task<Result<()>> {
1866 match &self.state {
1867 GitStoreState::Local { fs, .. } => {
1868 let fs = fs.clone();
1869 cx.background_executor()
1870 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1871 }
1872 GitStoreState::Remote {
1873 upstream_client,
1874 upstream_project_id: project_id,
1875 ..
1876 } => {
1877 let client = upstream_client.clone();
1878 let project_id = *project_id;
1879 cx.background_executor().spawn(async move {
1880 client
1881 .request(proto::GitInit {
1882 project_id: project_id,
1883 abs_path: path.to_string_lossy().into_owned(),
1884 fallback_branch_name,
1885 })
1886 .await?;
1887 Ok(())
1888 })
1889 }
1890 }
1891 }
1892
1893 pub fn git_clone(
1894 &self,
1895 repo: String,
1896 path: impl Into<Arc<std::path::Path>>,
1897 cx: &App,
1898 ) -> Task<Result<()>> {
1899 let path = path.into();
1900 match &self.state {
1901 GitStoreState::Local { fs, .. } => {
1902 let fs = fs.clone();
1903 cx.background_executor()
1904 .spawn(async move { fs.git_clone(&repo, &path).await })
1905 }
1906 GitStoreState::Remote {
1907 upstream_client,
1908 upstream_project_id,
1909 ..
1910 } => {
1911 if upstream_client.is_via_collab() {
1912 return Task::ready(Err(anyhow!(
1913 "Git Clone isn't supported for project guests"
1914 )));
1915 }
1916 let request = upstream_client.request(proto::GitClone {
1917 project_id: *upstream_project_id,
1918 abs_path: path.to_string_lossy().into_owned(),
1919 remote_repo: repo,
1920 });
1921
1922 cx.background_spawn(async move {
1923 let result = request.await?;
1924
1925 match result.success {
1926 true => Ok(()),
1927 false => Err(anyhow!("Git Clone failed")),
1928 }
1929 })
1930 }
1931 }
1932 }
1933
    /// RPC handler for `UpdateRepository`: creates or updates the local
    /// mirror of a repository owned by the upstream project, and relays the
    /// update to any downstream clients.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Lazily create the remote repository entity on first update.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository to arrive becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Forward the update downstream, rewriting the project id for
            // that connection.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1989
    /// RPC handler for `RemoveRepository`: drops the local mirror of an
    /// upstream repository, relays the removal downstream, and clears the
    /// active repository if it was the one removed.
    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            // Forward the removal downstream, rewriting the project id for
            // that connection.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        });
        Ok(())
    }
2011
2012 async fn handle_git_init(
2013 this: Entity<Self>,
2014 envelope: TypedEnvelope<proto::GitInit>,
2015 cx: AsyncApp,
2016 ) -> Result<proto::Ack> {
2017 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2018 let name = envelope.payload.fallback_branch_name;
2019 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2020 .await?;
2021
2022 Ok(proto::Ack {})
2023 }
2024
2025 async fn handle_git_clone(
2026 this: Entity<Self>,
2027 envelope: TypedEnvelope<proto::GitClone>,
2028 cx: AsyncApp,
2029 ) -> Result<proto::GitCloneResponse> {
2030 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2031 let repo_name = envelope.payload.remote_repo;
2032 let result = cx
2033 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2034 .await;
2035
2036 Ok(proto::GitCloneResponse {
2037 success: result.is_ok(),
2038 })
2039 }
2040
    /// RPC handler for `Fetch`: runs a fetch on the requested repository,
    /// proxying askpass prompts back to the requesting peer, and returns the
    /// remote's stdout/stderr.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Credential prompts are forwarded to the peer that issued this
        // request.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2070
    /// RPC handler for `Push`: pushes the given branch to the requested
    /// remote, proxying askpass prompts back to the requesting peer, and
    /// returns the remote's stdout/stderr.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Credential prompts are forwarded to the peer that issued this
        // request.
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // Translate the wire enum into internal push options, but only when
        // the optional field was actually set on the message.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2118
2119 async fn handle_pull(
2120 this: Entity<Self>,
2121 envelope: TypedEnvelope<proto::Pull>,
2122 mut cx: AsyncApp,
2123 ) -> Result<proto::RemoteMessageResponse> {
2124 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2125 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2126 let askpass_id = envelope.payload.askpass_id;
2127 let askpass = make_remote_delegate(
2128 this,
2129 envelope.payload.project_id,
2130 repository_id,
2131 askpass_id,
2132 &mut cx,
2133 );
2134
2135 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2136 let remote_name = envelope.payload.remote_name.into();
2137 let rebase = envelope.payload.rebase;
2138
2139 let remote_message = repository_handle
2140 .update(&mut cx, |repository_handle, cx| {
2141 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2142 })
2143 .await??;
2144
2145 Ok(proto::RemoteMessageResponse {
2146 stdout: remote_message.stdout,
2147 stderr: remote_message.stderr,
2148 })
2149 }
2150
2151 async fn handle_stage(
2152 this: Entity<Self>,
2153 envelope: TypedEnvelope<proto::Stage>,
2154 mut cx: AsyncApp,
2155 ) -> Result<proto::Ack> {
2156 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2157 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2158
2159 let entries = envelope
2160 .payload
2161 .paths
2162 .into_iter()
2163 .map(|path| RepoPath::new(&path))
2164 .collect::<Result<Vec<_>>>()?;
2165
2166 repository_handle
2167 .update(&mut cx, |repository_handle, cx| {
2168 repository_handle.stage_entries(entries, cx)
2169 })
2170 .await?;
2171 Ok(proto::Ack {})
2172 }
2173
2174 async fn handle_unstage(
2175 this: Entity<Self>,
2176 envelope: TypedEnvelope<proto::Unstage>,
2177 mut cx: AsyncApp,
2178 ) -> Result<proto::Ack> {
2179 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2180 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2181
2182 let entries = envelope
2183 .payload
2184 .paths
2185 .into_iter()
2186 .map(|path| RepoPath::new(&path))
2187 .collect::<Result<Vec<_>>>()?;
2188
2189 repository_handle
2190 .update(&mut cx, |repository_handle, cx| {
2191 repository_handle.unstage_entries(entries, cx)
2192 })
2193 .await?;
2194
2195 Ok(proto::Ack {})
2196 }
2197
2198 async fn handle_stash(
2199 this: Entity<Self>,
2200 envelope: TypedEnvelope<proto::Stash>,
2201 mut cx: AsyncApp,
2202 ) -> Result<proto::Ack> {
2203 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2204 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2205
2206 let entries = envelope
2207 .payload
2208 .paths
2209 .into_iter()
2210 .map(|path| RepoPath::new(&path))
2211 .collect::<Result<Vec<_>>>()?;
2212
2213 repository_handle
2214 .update(&mut cx, |repository_handle, cx| {
2215 repository_handle.stash_entries(entries, cx)
2216 })
2217 .await?;
2218
2219 Ok(proto::Ack {})
2220 }
2221
2222 async fn handle_stash_pop(
2223 this: Entity<Self>,
2224 envelope: TypedEnvelope<proto::StashPop>,
2225 mut cx: AsyncApp,
2226 ) -> Result<proto::Ack> {
2227 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2228 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2229 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2230
2231 repository_handle
2232 .update(&mut cx, |repository_handle, cx| {
2233 repository_handle.stash_pop(stash_index, cx)
2234 })
2235 .await?;
2236
2237 Ok(proto::Ack {})
2238 }
2239
2240 async fn handle_stash_apply(
2241 this: Entity<Self>,
2242 envelope: TypedEnvelope<proto::StashApply>,
2243 mut cx: AsyncApp,
2244 ) -> Result<proto::Ack> {
2245 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2246 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2247 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2248
2249 repository_handle
2250 .update(&mut cx, |repository_handle, cx| {
2251 repository_handle.stash_apply(stash_index, cx)
2252 })
2253 .await?;
2254
2255 Ok(proto::Ack {})
2256 }
2257
2258 async fn handle_stash_drop(
2259 this: Entity<Self>,
2260 envelope: TypedEnvelope<proto::StashDrop>,
2261 mut cx: AsyncApp,
2262 ) -> Result<proto::Ack> {
2263 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2264 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2265 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2266
2267 repository_handle
2268 .update(&mut cx, |repository_handle, cx| {
2269 repository_handle.stash_drop(stash_index, cx)
2270 })
2271 .await??;
2272
2273 Ok(proto::Ack {})
2274 }
2275
2276 async fn handle_set_index_text(
2277 this: Entity<Self>,
2278 envelope: TypedEnvelope<proto::SetIndexText>,
2279 mut cx: AsyncApp,
2280 ) -> Result<proto::Ack> {
2281 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2282 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2283 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2284
2285 repository_handle
2286 .update(&mut cx, |repository_handle, cx| {
2287 repository_handle.spawn_set_index_text_job(
2288 repo_path,
2289 envelope.payload.text,
2290 None,
2291 cx,
2292 )
2293 })
2294 .await??;
2295 Ok(proto::Ack {})
2296 }
2297
2298 async fn handle_run_hook(
2299 this: Entity<Self>,
2300 envelope: TypedEnvelope<proto::RunGitHook>,
2301 mut cx: AsyncApp,
2302 ) -> Result<proto::Ack> {
2303 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2304 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2305 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2306 repository_handle
2307 .update(&mut cx, |repository_handle, cx| {
2308 repository_handle.run_hook(hook, cx)
2309 })
2310 .await??;
2311 Ok(proto::Ack {})
2312 }
2313
    /// RPC handler: creates a commit in the requested repository with the
    /// given message and options, proxying any credential prompts (e.g. for
    /// commit signing) back to the client via an askpass delegate.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    // An explicit author is passed only when BOTH name and
                    // email were supplied; otherwise the repository's
                    // configured identity is used.
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                        allow_empty: options.allow_empty,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2353
2354 async fn handle_get_remotes(
2355 this: Entity<Self>,
2356 envelope: TypedEnvelope<proto::GetRemotes>,
2357 mut cx: AsyncApp,
2358 ) -> Result<proto::GetRemotesResponse> {
2359 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2360 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2361
2362 let branch_name = envelope.payload.branch_name;
2363 let is_push = envelope.payload.is_push;
2364
2365 let remotes = repository_handle
2366 .update(&mut cx, |repository_handle, _| {
2367 repository_handle.get_remotes(branch_name, is_push)
2368 })
2369 .await??;
2370
2371 Ok(proto::GetRemotesResponse {
2372 remotes: remotes
2373 .into_iter()
2374 .map(|remotes| proto::get_remotes_response::Remote {
2375 name: remotes.name.to_string(),
2376 })
2377 .collect::<Vec<_>>(),
2378 })
2379 }
2380
2381 async fn handle_get_worktrees(
2382 this: Entity<Self>,
2383 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2384 mut cx: AsyncApp,
2385 ) -> Result<proto::GitWorktreesResponse> {
2386 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2387 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2388
2389 let worktrees = repository_handle
2390 .update(&mut cx, |repository_handle, _| {
2391 repository_handle.worktrees()
2392 })
2393 .await??;
2394
2395 Ok(proto::GitWorktreesResponse {
2396 worktrees: worktrees
2397 .into_iter()
2398 .map(|worktree| worktree_to_proto(&worktree))
2399 .collect::<Vec<_>>(),
2400 })
2401 }
2402
2403 async fn handle_create_worktree(
2404 this: Entity<Self>,
2405 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2406 mut cx: AsyncApp,
2407 ) -> Result<proto::Ack> {
2408 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2409 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2410 let directory = PathBuf::from(envelope.payload.directory);
2411 let name = envelope.payload.name;
2412 let commit = envelope.payload.commit;
2413 let use_existing_branch = envelope.payload.use_existing_branch;
2414 let target = if name.is_empty() {
2415 CreateWorktreeTarget::Detached { base_sha: commit }
2416 } else if use_existing_branch {
2417 CreateWorktreeTarget::ExistingBranch { branch_name: name }
2418 } else {
2419 CreateWorktreeTarget::NewBranch {
2420 branch_name: name,
2421 base_sha: commit,
2422 }
2423 };
2424
2425 repository_handle
2426 .update(&mut cx, |repository_handle, _| {
2427 repository_handle.create_worktree(target, directory)
2428 })
2429 .await??;
2430
2431 Ok(proto::Ack {})
2432 }
2433
2434 async fn handle_remove_worktree(
2435 this: Entity<Self>,
2436 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2437 mut cx: AsyncApp,
2438 ) -> Result<proto::Ack> {
2439 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2440 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2441 let path = PathBuf::from(envelope.payload.path);
2442 let force = envelope.payload.force;
2443
2444 repository_handle
2445 .update(&mut cx, |repository_handle, _| {
2446 repository_handle.remove_worktree(path, force)
2447 })
2448 .await??;
2449
2450 Ok(proto::Ack {})
2451 }
2452
2453 async fn handle_rename_worktree(
2454 this: Entity<Self>,
2455 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2456 mut cx: AsyncApp,
2457 ) -> Result<proto::Ack> {
2458 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2459 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2460 let old_path = PathBuf::from(envelope.payload.old_path);
2461 let new_path = PathBuf::from(envelope.payload.new_path);
2462
2463 repository_handle
2464 .update(&mut cx, |repository_handle, _| {
2465 repository_handle.rename_worktree(old_path, new_path)
2466 })
2467 .await??;
2468
2469 Ok(proto::Ack {})
2470 }
2471
2472 async fn handle_get_head_sha(
2473 this: Entity<Self>,
2474 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2475 mut cx: AsyncApp,
2476 ) -> Result<proto::GitGetHeadShaResponse> {
2477 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2478 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2479
2480 let head_sha = repository_handle
2481 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2482 .await??;
2483
2484 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2485 }
2486
2487 async fn handle_get_branches(
2488 this: Entity<Self>,
2489 envelope: TypedEnvelope<proto::GitGetBranches>,
2490 mut cx: AsyncApp,
2491 ) -> Result<proto::GitBranchesResponse> {
2492 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2493 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2494
2495 let branches = repository_handle
2496 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2497 .await??;
2498
2499 Ok(proto::GitBranchesResponse {
2500 branches: branches
2501 .into_iter()
2502 .map(|branch| branch_to_proto(&branch))
2503 .collect::<Vec<_>>(),
2504 })
2505 }
2506 async fn handle_get_default_branch(
2507 this: Entity<Self>,
2508 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2509 mut cx: AsyncApp,
2510 ) -> Result<proto::GetDefaultBranchResponse> {
2511 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2512 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2513
2514 let branch = repository_handle
2515 .update(&mut cx, |repository_handle, _| {
2516 repository_handle.default_branch(false)
2517 })
2518 .await??
2519 .map(Into::into);
2520
2521 Ok(proto::GetDefaultBranchResponse { branch })
2522 }
2523 async fn handle_create_branch(
2524 this: Entity<Self>,
2525 envelope: TypedEnvelope<proto::GitCreateBranch>,
2526 mut cx: AsyncApp,
2527 ) -> Result<proto::Ack> {
2528 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2529 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2530 let branch_name = envelope.payload.branch_name;
2531
2532 repository_handle
2533 .update(&mut cx, |repository_handle, _| {
2534 repository_handle.create_branch(branch_name, None)
2535 })
2536 .await??;
2537
2538 Ok(proto::Ack {})
2539 }
2540
2541 async fn handle_change_branch(
2542 this: Entity<Self>,
2543 envelope: TypedEnvelope<proto::GitChangeBranch>,
2544 mut cx: AsyncApp,
2545 ) -> Result<proto::Ack> {
2546 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2547 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2548 let branch_name = envelope.payload.branch_name;
2549
2550 repository_handle
2551 .update(&mut cx, |repository_handle, _| {
2552 repository_handle.change_branch(branch_name)
2553 })
2554 .await??;
2555
2556 Ok(proto::Ack {})
2557 }
2558
2559 async fn handle_rename_branch(
2560 this: Entity<Self>,
2561 envelope: TypedEnvelope<proto::GitRenameBranch>,
2562 mut cx: AsyncApp,
2563 ) -> Result<proto::Ack> {
2564 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2565 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2566 let branch = envelope.payload.branch;
2567 let new_name = envelope.payload.new_name;
2568
2569 repository_handle
2570 .update(&mut cx, |repository_handle, _| {
2571 repository_handle.rename_branch(branch, new_name)
2572 })
2573 .await??;
2574
2575 Ok(proto::Ack {})
2576 }
2577
2578 async fn handle_create_remote(
2579 this: Entity<Self>,
2580 envelope: TypedEnvelope<proto::GitCreateRemote>,
2581 mut cx: AsyncApp,
2582 ) -> Result<proto::Ack> {
2583 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2584 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2585 let remote_name = envelope.payload.remote_name;
2586 let remote_url = envelope.payload.remote_url;
2587
2588 repository_handle
2589 .update(&mut cx, |repository_handle, _| {
2590 repository_handle.create_remote(remote_name, remote_url)
2591 })
2592 .await??;
2593
2594 Ok(proto::Ack {})
2595 }
2596
2597 async fn handle_delete_branch(
2598 this: Entity<Self>,
2599 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2600 mut cx: AsyncApp,
2601 ) -> Result<proto::Ack> {
2602 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2603 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2604 let is_remote = envelope.payload.is_remote;
2605 let branch_name = envelope.payload.branch_name;
2606
2607 repository_handle
2608 .update(&mut cx, |repository_handle, _| {
2609 repository_handle.delete_branch(is_remote, branch_name)
2610 })
2611 .await??;
2612
2613 Ok(proto::Ack {})
2614 }
2615
2616 async fn handle_remove_remote(
2617 this: Entity<Self>,
2618 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2619 mut cx: AsyncApp,
2620 ) -> Result<proto::Ack> {
2621 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2622 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2623 let remote_name = envelope.payload.remote_name;
2624
2625 repository_handle
2626 .update(&mut cx, |repository_handle, _| {
2627 repository_handle.remove_remote(remote_name)
2628 })
2629 .await??;
2630
2631 Ok(proto::Ack {})
2632 }
2633
2634 async fn handle_show(
2635 this: Entity<Self>,
2636 envelope: TypedEnvelope<proto::GitShow>,
2637 mut cx: AsyncApp,
2638 ) -> Result<proto::GitCommitDetails> {
2639 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2640 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2641
2642 let commit = repository_handle
2643 .update(&mut cx, |repository_handle, _| {
2644 repository_handle.show(envelope.payload.commit)
2645 })
2646 .await??;
2647 Ok(proto::GitCommitDetails {
2648 sha: commit.sha.into(),
2649 message: commit.message.into(),
2650 commit_timestamp: commit.commit_timestamp,
2651 author_email: commit.author_email.into(),
2652 author_name: commit.author_name.into(),
2653 })
2654 }
2655
2656 async fn handle_create_checkpoint(
2657 this: Entity<Self>,
2658 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2659 mut cx: AsyncApp,
2660 ) -> Result<proto::GitCreateCheckpointResponse> {
2661 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2662 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2663
2664 let checkpoint = repository_handle
2665 .update(&mut cx, |repository, _| repository.checkpoint())
2666 .await??;
2667
2668 Ok(proto::GitCreateCheckpointResponse {
2669 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2670 })
2671 }
2672
2673 async fn handle_restore_checkpoint(
2674 this: Entity<Self>,
2675 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2676 mut cx: AsyncApp,
2677 ) -> Result<proto::Ack> {
2678 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2679 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2680
2681 let checkpoint = GitRepositoryCheckpoint {
2682 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2683 };
2684
2685 repository_handle
2686 .update(&mut cx, |repository, _| {
2687 repository.restore_checkpoint(checkpoint)
2688 })
2689 .await??;
2690
2691 Ok(proto::Ack {})
2692 }
2693
2694 async fn handle_compare_checkpoints(
2695 this: Entity<Self>,
2696 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2697 mut cx: AsyncApp,
2698 ) -> Result<proto::GitCompareCheckpointsResponse> {
2699 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2700 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2701
2702 let left = GitRepositoryCheckpoint {
2703 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2704 };
2705 let right = GitRepositoryCheckpoint {
2706 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2707 };
2708
2709 let equal = repository_handle
2710 .update(&mut cx, |repository, _| {
2711 repository.compare_checkpoints(left, right)
2712 })
2713 .await??;
2714
2715 Ok(proto::GitCompareCheckpointsResponse { equal })
2716 }
2717
2718 async fn handle_diff_checkpoints(
2719 this: Entity<Self>,
2720 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2721 mut cx: AsyncApp,
2722 ) -> Result<proto::GitDiffCheckpointsResponse> {
2723 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2724 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2725
2726 let base = GitRepositoryCheckpoint {
2727 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2728 };
2729 let target = GitRepositoryCheckpoint {
2730 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2731 };
2732
2733 let diff = repository_handle
2734 .update(&mut cx, |repository, _| {
2735 repository.diff_checkpoints(base, target)
2736 })
2737 .await??;
2738
2739 Ok(proto::GitDiffCheckpointsResponse { diff })
2740 }
2741
2742 async fn handle_load_commit_diff(
2743 this: Entity<Self>,
2744 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2745 mut cx: AsyncApp,
2746 ) -> Result<proto::LoadCommitDiffResponse> {
2747 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2748 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2749
2750 let commit_diff = repository_handle
2751 .update(&mut cx, |repository_handle, _| {
2752 repository_handle.load_commit_diff(envelope.payload.commit)
2753 })
2754 .await??;
2755 Ok(proto::LoadCommitDiffResponse {
2756 files: commit_diff
2757 .files
2758 .into_iter()
2759 .map(|file| proto::CommitFile {
2760 path: file.path.to_proto(),
2761 old_text: file.old_text,
2762 new_text: file.new_text,
2763 is_binary: file.is_binary,
2764 })
2765 .collect(),
2766 })
2767 }
2768
2769 async fn handle_file_history(
2770 this: Entity<Self>,
2771 envelope: TypedEnvelope<proto::GitFileHistory>,
2772 mut cx: AsyncApp,
2773 ) -> Result<proto::GitFileHistoryResponse> {
2774 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2775 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2776 let path = RepoPath::from_proto(&envelope.payload.path)?;
2777 let skip = envelope.payload.skip as usize;
2778 let limit = envelope.payload.limit.map(|l| l as usize);
2779
2780 let file_history = repository_handle
2781 .update(&mut cx, |repository_handle, _| {
2782 repository_handle.file_history_paginated(path, skip, limit)
2783 })
2784 .await??;
2785
2786 Ok(proto::GitFileHistoryResponse {
2787 entries: file_history
2788 .entries
2789 .into_iter()
2790 .map(|entry| proto::FileHistoryEntry {
2791 sha: entry.sha.to_string(),
2792 subject: entry.subject.to_string(),
2793 message: entry.message.to_string(),
2794 commit_timestamp: entry.commit_timestamp,
2795 author_name: entry.author_name.to_string(),
2796 author_email: entry.author_email.to_string(),
2797 })
2798 .collect(),
2799 path: file_history.path.to_proto(),
2800 })
2801 }
2802
2803 async fn handle_reset(
2804 this: Entity<Self>,
2805 envelope: TypedEnvelope<proto::GitReset>,
2806 mut cx: AsyncApp,
2807 ) -> Result<proto::Ack> {
2808 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2809 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2810
2811 let mode = match envelope.payload.mode() {
2812 git_reset::ResetMode::Soft => ResetMode::Soft,
2813 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2814 };
2815
2816 repository_handle
2817 .update(&mut cx, |repository_handle, cx| {
2818 repository_handle.reset(envelope.payload.commit, mode, cx)
2819 })
2820 .await??;
2821 Ok(proto::Ack {})
2822 }
2823
2824 async fn handle_checkout_files(
2825 this: Entity<Self>,
2826 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2827 mut cx: AsyncApp,
2828 ) -> Result<proto::Ack> {
2829 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2830 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2831 let paths = envelope
2832 .payload
2833 .paths
2834 .iter()
2835 .map(|s| RepoPath::from_proto(s))
2836 .collect::<Result<Vec<_>>>()?;
2837
2838 repository_handle
2839 .update(&mut cx, |repository_handle, cx| {
2840 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2841 })
2842 .await?;
2843 Ok(proto::Ack {})
2844 }
2845
    /// RPC handler: opens (or reuses) the repository's commit-message buffer
    /// and replicates it to the requesting peer, replying with the buffer id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                // Replicate the buffer to the originator of the request
                // (falling back to the direct sender when the message wasn't
                // forwarded); replication runs in the background.
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2876
    /// RPC handler: services a credential prompt raised by an in-flight git
    /// operation. Looks up the askpass delegate registered under `askpass_id`,
    /// forwards the prompt to the user, and returns the entered secret.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Remove the delegate while prompting so concurrent requests for the
        // same id cannot race on it; it is re-inserted below on success.
        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so follow-up prompts for the same operation
        // (e.g. username then password) can find it again. Note: an early
        // return on cancellation above leaves it removed.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2905
2906 async fn handle_check_for_pushed_commits(
2907 this: Entity<Self>,
2908 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2909 mut cx: AsyncApp,
2910 ) -> Result<proto::CheckForPushedCommitsResponse> {
2911 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2912 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2913
2914 let branches = repository_handle
2915 .update(&mut cx, |repository_handle, _| {
2916 repository_handle.check_for_pushed_commits()
2917 })
2918 .await??;
2919 Ok(proto::CheckForPushedCommitsResponse {
2920 pushed_to: branches
2921 .into_iter()
2922 .map(|commit| commit.to_string())
2923 .collect(),
2924 })
2925 }
2926
2927 async fn handle_git_diff(
2928 this: Entity<Self>,
2929 envelope: TypedEnvelope<proto::GitDiff>,
2930 mut cx: AsyncApp,
2931 ) -> Result<proto::GitDiffResponse> {
2932 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2933 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2934 let diff_type = match envelope.payload.diff_type() {
2935 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2936 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2937 proto::git_diff::DiffType::MergeBase => {
2938 let base_ref = envelope
2939 .payload
2940 .merge_base_ref
2941 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2942 DiffType::MergeBase {
2943 base_ref: base_ref.into(),
2944 }
2945 }
2946 };
2947
2948 let mut diff = repository_handle
2949 .update(&mut cx, |repository_handle, cx| {
2950 repository_handle.diff(diff_type, cx)
2951 })
2952 .await??;
2953 const ONE_MB: usize = 1_000_000;
2954 if diff.len() > ONE_MB {
2955 diff = diff.chars().take(ONE_MB).collect()
2956 }
2957
2958 Ok(proto::GitDiffResponse { diff })
2959 }
2960
    /// RPC handler: computes a tree-level diff between `base` and `head`
    /// (either directly or relative to their merge base) and returns one
    /// status entry per changed path.
    async fn handle_tree_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetTreeDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId(request.payload.repository_id);
        let diff_type = if request.payload.is_merge {
            DiffTreeType::MergeBase {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        } else {
            DiffTreeType::Since {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        };

        let diff = this
            .update(&mut cx, |this, cx| {
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
            })
            .context("missing repository")?
            .await??;

        Ok(proto::GetTreeDiffResponse {
            entries: diff
                .entries
                .into_iter()
                .map(|(path, status)| proto::TreeDiffStatus {
                    path: path.as_ref().to_proto(),
                    // `status` is matched twice: first (with non-binding
                    // patterns) for the discriminant, then again to pull the
                    // old oid out of the variants that carry one.
                    status: match status {
                        TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
                        TreeDiffStatus::Modified { .. } => {
                            proto::tree_diff_status::Status::Modified.into()
                        }
                        TreeDiffStatus::Deleted { .. } => {
                            proto::tree_diff_status::Status::Deleted.into()
                        }
                    },
                    // Only deleted/modified entries have a pre-change oid;
                    // added files have no prior blob.
                    oid: match status {
                        TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
                            Some(old.to_string())
                        }
                        TreeDiffStatus::Added => None,
                    },
                })
                .collect(),
        })
    }
3012
3013 async fn handle_get_blob_content(
3014 this: Entity<Self>,
3015 request: TypedEnvelope<proto::GetBlobContent>,
3016 mut cx: AsyncApp,
3017 ) -> Result<proto::GetBlobContentResponse> {
3018 let oid = git::Oid::from_str(&request.payload.oid)?;
3019 let repository_id = RepositoryId(request.payload.repository_id);
3020 let content = this
3021 .update(&mut cx, |this, cx| {
3022 let repository = this.repositories().get(&repository_id)?;
3023 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
3024 })
3025 .context("missing repository")?
3026 .await?;
3027 Ok(proto::GetBlobContentResponse { content })
3028 }
3029
    /// RPC handler: opens the unstaged diff (index vs. buffer) for a buffer,
    /// records it in `shared_diffs` for the requesting peer, and replies with
    /// the staged (index) base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Retain the diff per requesting peer so it stays alive (and keeps
        // updating) for as long as the peer holds it open.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        // None when the file has no index entry (e.g. untracked).
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
3053
    /// Remote-protocol handler: opens the uncommitted diff (working copy vs.
    /// HEAD, with the index diff as a secondary) for a buffer, retains it in
    /// `shared_diffs` for the requesting peer, and replies with whichever
    /// base texts the client needs to reconstruct both diffs.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Remember the diff per requesting peer so subsequent base-text
        // updates can be pushed to it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff, when present, tracks the index (staged)
            // state of this buffer.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            // Decide which base texts to send. When the index base is the
            // very same snapshot buffer as the HEAD base (same remote id),
            // only the committed text is sent and the client reuses it for
            // both sides (`IndexMatchesHead`).
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // HEAD base exists but there is no index base text.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No HEAD base text (presumably a new/untracked file — the
                // enum variants don't distinguish why); send the index text
                // if it exists.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3114
3115 async fn handle_update_diff_bases(
3116 this: Entity<Self>,
3117 request: TypedEnvelope<proto::UpdateDiffBases>,
3118 mut cx: AsyncApp,
3119 ) -> Result<()> {
3120 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3121 this.update(&mut cx, |this, cx| {
3122 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
3123 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
3124 {
3125 let buffer = buffer.read(cx).text_snapshot();
3126 diff_state.update(cx, |diff_state, cx| {
3127 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
3128 })
3129 }
3130 });
3131 Ok(())
3132 }
3133
3134 async fn handle_blame_buffer(
3135 this: Entity<Self>,
3136 envelope: TypedEnvelope<proto::BlameBuffer>,
3137 mut cx: AsyncApp,
3138 ) -> Result<proto::BlameBufferResponse> {
3139 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3140 let version = deserialize_version(&envelope.payload.version);
3141 let buffer = this.read_with(&cx, |this, cx| {
3142 this.buffer_store.read(cx).get_existing(buffer_id)
3143 })?;
3144 buffer
3145 .update(&mut cx, |buffer, _| {
3146 buffer.wait_for_version(version.clone())
3147 })
3148 .await?;
3149 let blame = this
3150 .update(&mut cx, |this, cx| {
3151 this.blame_buffer(&buffer, Some(version), cx)
3152 })
3153 .await?;
3154 Ok(serialize_blame_buffer_response(blame))
3155 }
3156
3157 async fn handle_get_permalink_to_line(
3158 this: Entity<Self>,
3159 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3160 mut cx: AsyncApp,
3161 ) -> Result<proto::GetPermalinkToLineResponse> {
3162 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3163 // let version = deserialize_version(&envelope.payload.version);
3164 let selection = {
3165 let proto_selection = envelope
3166 .payload
3167 .selection
3168 .context("no selection to get permalink for defined")?;
3169 proto_selection.start as u32..proto_selection.end as u32
3170 };
3171 let buffer = this.read_with(&cx, |this, cx| {
3172 this.buffer_store.read(cx).get_existing(buffer_id)
3173 })?;
3174 let permalink = this
3175 .update(&mut cx, |this, cx| {
3176 this.get_permalink_to_line(&buffer, selection, cx)
3177 })
3178 .await?;
3179 Ok(proto::GetPermalinkToLineResponse {
3180 permalink: permalink.to_string(),
3181 })
3182 }
3183
3184 fn repository_for_request(
3185 this: &Entity<Self>,
3186 id: RepositoryId,
3187 cx: &mut AsyncApp,
3188 ) -> Result<Entity<Repository>> {
3189 this.read_with(cx, |this, _| {
3190 this.repositories
3191 .get(&id)
3192 .context("missing repository handle")
3193 .cloned()
3194 })
3195 }
3196
3197 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3198 self.repositories
3199 .iter()
3200 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3201 .collect()
3202 }
3203
    /// Groups a batch of updated worktree entries by the repository that owns
    /// each one, resolving nesting so that every path is assigned to its
    /// *innermost* containing repository.
    ///
    /// The heavy lifting (sorting, prefix matching) runs on the background
    /// executor, with one spawned task per repository.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // (work directory, repo handle) pairs for every known repository.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize once, up front, so the background tasks can compare
        // against the repositories' absolute work directories.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sort repositories by work directory, then iterate in reverse so
            // that nested (more deeply-pathed) repositories are visited first.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the entry's index so duplicates across
                        // nested repos can be filtered out below.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were spawned in reverse path order, so this forward
            // iteration sees more-specific (deeper) repositories first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    // Already claimed by a more deeply nested repository.
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3283}
3284
impl BufferGitState {
    /// Creates an empty per-buffer git state: no diffs, no base texts, and no
    /// conflict tracking yet.
    ///
    /// NOTE(review): the `_git_store` back-reference is currently unused;
    /// presumably kept for signature compatibility with callers.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Starts as `false`: no diff recalculation is in flight.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }

    /// Records the buffer's new language and kicks off a diff recalculation
    /// so syntax-aware diff state is rebuilt.
    #[ztracing::instrument(skip_all)]
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Re-parses git conflict markers in the given buffer snapshot on the
    /// background executor and pushes the result into the conflict set.
    ///
    /// Returns a receiver that resolves once the update has been applied. If
    /// there is no live conflict set, or the set currently reports no
    /// conflict, the sender is dropped immediately and the receiver will
    /// yield `Canceled`.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when the set believes there is a conflict; otherwise
        // there is nothing to compare against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            // Replacing the task cancels any previous in-flight reparse.
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Notify every caller waiting on a reparse, not just the
                    // one that started this task.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }

    /// Upgrades the weak handle to the unstaged (working copy vs. index)
    /// diff, if it is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Upgrades the weak handle to the uncommitted (working copy vs. HEAD)
    /// diff, if it is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Upgrades the weak handle to the diff against a specific commit OID
    /// (`None` keys a special entry — see `oid_diffs` usage), if alive.
    fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
        self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
    }

    /// Applies an `UpdateDiffBases` message from the host, translating its
    /// wire `mode` into the corresponding [`DiffBasesChange`] and triggering
    /// a diff recalculation. Unknown modes are ignored.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            // Index matches HEAD: one text serves as both bases.
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }

    /// If a diff recalculation is in flight, returns a future that resolves
    /// once the `recalculating` flag goes back to `false` (or the channel
    /// closes); otherwise returns `None`.
    pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
        if *self.recalculating_tx.borrow() {
            let mut rx = self.recalculating_tx.subscribe();
            Some(async move {
                loop {
                    let is_recalculating = rx.recv().await;
                    if is_recalculating != Some(true) {
                        break;
                    }
                }
            })
        } else {
            None
        }
    }

    /// Stores new base texts (index and/or HEAD) after normalizing line
    /// endings, marks the corresponding change flags, and recalculates diffs.
    ///
    /// Note that `SetBoth` stores the *same* `Arc` in both fields, which is
    /// what later lets `recalculate_diffs` detect "index matches HEAD" via
    /// pointer equality.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: Option<DiffBasesChange>,
        cx: &mut Context<Self>,
    ) {
        match diff_bases_change {
            Some(DiffBasesChange::SetIndex(index)) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetHead(head)) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            Some(DiffBasesChange::SetBoth(text)) => {
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::from(text.as_str())
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetEach { index, head }) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            None => {}
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged, uncommitted, and per-OID diffs for this
    /// buffer against the currently stored base texts.
    ///
    /// The work runs as a single spawned task (replacing any previous one)
    /// and publishes progress through `recalculating_tx`. The recalculation
    /// is aborted mid-way if new hunk stage/unstage operations arrive, so
    /// that pending UI state isn't invalidated before the index settles.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality is sufficient here: `diff_bases_changed` stores
        // the same Arc in both fields when the texts are known to match.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        // Capture the still-alive OID diffs (and their base texts) before
        // spawning, so the task works on a stable set.
        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Garbage-collect dropped OID diffs along with their cached texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                new_uncommitted_diff = if index_matches_head {
                    // Index == HEAD: reuse the unstaged computation instead of
                    // diffing the same base text twice.
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            // Publish the new unstaged snapshot first, remembering which
            // range changed so the uncommitted diff can reuse it below.
            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Finally refresh each surviving per-OID diff, yielding between
            // iterations to keep the event loop responsive.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the change flags and signal completion to any waiters.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
}
3664
/// Builds an askpass delegate that forwards credential prompts raised by a
/// local git operation to the downstream (remote collaborator) client over
/// RPC, relaying the encrypted answer back to the waiting git process.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // No downstream client connected: dropping `tx` without sending
            // lets the askpass machinery observe the cancellation.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Best-effort scrub of the plaintext secret from memory.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3694
3695impl RepositoryId {
3696 pub fn to_proto(self) -> u64 {
3697 self.0
3698 }
3699
3700 pub fn from_proto(id: u64) -> Self {
3701 RepositoryId(id)
3702 }
3703}
3704
3705impl RepositorySnapshot {
3706 fn empty(
3707 id: RepositoryId,
3708 work_directory_abs_path: Arc<Path>,
3709 original_repo_abs_path: Option<Arc<Path>>,
3710 path_style: PathStyle,
3711 ) -> Self {
3712 Self {
3713 id,
3714 statuses_by_path: Default::default(),
3715 original_repo_abs_path: original_repo_abs_path
3716 .unwrap_or_else(|| work_directory_abs_path.clone()),
3717 work_directory_abs_path,
3718 branch: None,
3719 branch_list: Arc::from([]),
3720 head_commit: None,
3721 scan_id: 0,
3722 merge: Default::default(),
3723 remote_origin_url: None,
3724 remote_upstream_url: None,
3725 stash_entries: Default::default(),
3726 linked_worktrees: Arc::from([]),
3727 path_style,
3728 }
3729 }
3730
3731 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3732 proto::UpdateRepository {
3733 branch_summary: self.branch.as_ref().map(branch_to_proto),
3734 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3735 updated_statuses: self
3736 .statuses_by_path
3737 .iter()
3738 .map(|entry| entry.to_proto())
3739 .collect(),
3740 removed_statuses: Default::default(),
3741 current_merge_conflicts: self
3742 .merge
3743 .merge_heads_by_conflicted_path
3744 .iter()
3745 .map(|(repo_path, _)| repo_path.to_proto())
3746 .collect(),
3747 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3748 project_id,
3749 id: self.id.to_proto(),
3750 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3751 entry_ids: vec![self.id.to_proto()],
3752 scan_id: self.scan_id,
3753 is_last_update: true,
3754 stash_entries: self
3755 .stash_entries
3756 .entries
3757 .iter()
3758 .map(stash_to_proto)
3759 .collect(),
3760 remote_upstream_url: self.remote_upstream_url.clone(),
3761 remote_origin_url: self.remote_origin_url.clone(),
3762 original_repo_abs_path: Some(
3763 self.original_repo_abs_path.to_string_lossy().into_owned(),
3764 ),
3765 linked_worktrees: self
3766 .linked_worktrees
3767 .iter()
3768 .map(worktree_to_proto)
3769 .collect(),
3770 }
3771 }
3772
3773 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3774 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3775 let mut removed_statuses: Vec<String> = Vec::new();
3776
3777 let mut new_statuses = self.statuses_by_path.iter().peekable();
3778 let mut old_statuses = old.statuses_by_path.iter().peekable();
3779
3780 let mut current_new_entry = new_statuses.next();
3781 let mut current_old_entry = old_statuses.next();
3782 loop {
3783 match (current_new_entry, current_old_entry) {
3784 (Some(new_entry), Some(old_entry)) => {
3785 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3786 Ordering::Less => {
3787 updated_statuses.push(new_entry.to_proto());
3788 current_new_entry = new_statuses.next();
3789 }
3790 Ordering::Equal => {
3791 if new_entry.status != old_entry.status
3792 || new_entry.diff_stat != old_entry.diff_stat
3793 {
3794 updated_statuses.push(new_entry.to_proto());
3795 }
3796 current_old_entry = old_statuses.next();
3797 current_new_entry = new_statuses.next();
3798 }
3799 Ordering::Greater => {
3800 removed_statuses.push(old_entry.repo_path.to_proto());
3801 current_old_entry = old_statuses.next();
3802 }
3803 }
3804 }
3805 (None, Some(old_entry)) => {
3806 removed_statuses.push(old_entry.repo_path.to_proto());
3807 current_old_entry = old_statuses.next();
3808 }
3809 (Some(new_entry), None) => {
3810 updated_statuses.push(new_entry.to_proto());
3811 current_new_entry = new_statuses.next();
3812 }
3813 (None, None) => break,
3814 }
3815 }
3816
3817 proto::UpdateRepository {
3818 branch_summary: self.branch.as_ref().map(branch_to_proto),
3819 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3820 updated_statuses,
3821 removed_statuses,
3822 current_merge_conflicts: self
3823 .merge
3824 .merge_heads_by_conflicted_path
3825 .iter()
3826 .map(|(path, _)| path.to_proto())
3827 .collect(),
3828 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3829 project_id,
3830 id: self.id.to_proto(),
3831 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3832 entry_ids: vec![],
3833 scan_id: self.scan_id,
3834 is_last_update: true,
3835 stash_entries: self
3836 .stash_entries
3837 .entries
3838 .iter()
3839 .map(stash_to_proto)
3840 .collect(),
3841 remote_upstream_url: self.remote_upstream_url.clone(),
3842 remote_origin_url: self.remote_origin_url.clone(),
3843 original_repo_abs_path: Some(
3844 self.original_repo_abs_path.to_string_lossy().into_owned(),
3845 ),
3846 linked_worktrees: self
3847 .linked_worktrees
3848 .iter()
3849 .map(worktree_to_proto)
3850 .collect(),
3851 }
3852 }
3853
3854 /// The main worktree is the original checkout that other worktrees were
3855 /// created from.
3856 ///
3857 /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
3858 /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
3859 ///
3860 /// Submodules also return `true` here, since they are not linked worktrees.
3861 pub fn is_main_worktree(&self) -> bool {
3862 self.work_directory_abs_path == self.original_repo_abs_path
3863 }
3864
3865 /// Returns true if this repository is a linked worktree, that is, one that
3866 /// was created from another worktree.
3867 ///
3868 /// Returns `false` for both the main worktree and submodules.
3869 pub fn is_linked_worktree(&self) -> bool {
3870 !self.is_main_worktree()
3871 }
3872
3873 pub fn linked_worktrees(&self) -> &[GitWorktree] {
3874 &self.linked_worktrees
3875 }
3876
3877 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3878 self.statuses_by_path.iter().cloned()
3879 }
3880
3881 pub fn status_summary(&self) -> GitSummary {
3882 self.statuses_by_path.summary().item_summary
3883 }
3884
3885 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3886 self.statuses_by_path
3887 .get(&PathKey(path.as_ref().clone()), ())
3888 .cloned()
3889 }
3890
3891 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
3892 self.statuses_by_path
3893 .get(&PathKey(path.as_ref().clone()), ())
3894 .and_then(|entry| entry.diff_stat)
3895 }
3896
3897 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3898 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3899 }
3900
3901 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3902 self.path_style
3903 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3904 .unwrap()
3905 .into()
3906 }
3907
3908 #[inline]
3909 fn abs_path_to_repo_path_inner(
3910 work_directory_abs_path: &Path,
3911 abs_path: &Path,
3912 path_style: PathStyle,
3913 ) -> Option<RepoPath> {
3914 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3915 Some(RepoPath::from_rel_path(&rel_path))
3916 }
3917
3918 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3919 self.merge
3920 .merge_heads_by_conflicted_path
3921 .contains_key(repo_path)
3922 }
3923
3924 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3925 let had_conflict_on_last_merge_head_change = self
3926 .merge
3927 .merge_heads_by_conflicted_path
3928 .contains_key(repo_path);
3929 let has_conflict_currently = self
3930 .status_for_path(repo_path)
3931 .is_some_and(|entry| entry.status.is_conflicted());
3932 had_conflict_on_last_merge_head_change || has_conflict_currently
3933 }
3934
3935 /// This is the name that will be displayed in the repository selector for this repository.
3936 pub fn display_name(&self) -> SharedString {
3937 self.work_directory_abs_path
3938 .file_name()
3939 .unwrap_or_default()
3940 .to_string_lossy()
3941 .to_string()
3942 .into()
3943 }
3944}
3945
3946pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3947 proto::StashEntry {
3948 oid: entry.oid.as_bytes().to_vec(),
3949 message: entry.message.clone(),
3950 branch: entry.branch.clone(),
3951 index: entry.index as u64,
3952 timestamp: entry.timestamp,
3953 }
3954}
3955
3956pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3957 Ok(StashEntry {
3958 oid: Oid::from_bytes(&entry.oid)?,
3959 message: entry.message.clone(),
3960 index: entry.index as usize,
3961 branch: entry.branch.clone(),
3962 timestamp: entry.timestamp,
3963 })
3964}
3965
3966impl MergeDetails {
3967 async fn update(
3968 &mut self,
3969 backend: &Arc<dyn GitRepository>,
3970 current_conflicted_paths: Vec<RepoPath>,
3971 ) -> Result<bool> {
3972 log::debug!("load merge details");
3973 self.message = backend.merge_message().await.map(SharedString::from);
3974 let heads = backend
3975 .revparse_batch(vec![
3976 "MERGE_HEAD".into(),
3977 "CHERRY_PICK_HEAD".into(),
3978 "REBASE_HEAD".into(),
3979 "REVERT_HEAD".into(),
3980 "APPLY_HEAD".into(),
3981 ])
3982 .await
3983 .log_err()
3984 .unwrap_or_default()
3985 .into_iter()
3986 .map(|opt| opt.map(SharedString::from))
3987 .collect::<Vec<_>>();
3988
3989 let mut conflicts_changed = false;
3990
3991 // Record the merge state for newly conflicted paths
3992 for path in ¤t_conflicted_paths {
3993 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3994 conflicts_changed = true;
3995 self.merge_heads_by_conflicted_path
3996 .insert(path.clone(), heads.clone());
3997 }
3998 }
3999
4000 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
4001 self.merge_heads_by_conflicted_path
4002 .retain(|path, old_merge_heads| {
4003 let keep = current_conflicted_paths.contains(path)
4004 || (old_merge_heads == &heads
4005 && old_merge_heads.iter().any(|head| head.is_some()));
4006 if !keep {
4007 conflicts_changed = true;
4008 }
4009 keep
4010 });
4011
4012 Ok(conflicts_changed)
4013 }
4014}
4015
4016impl Repository {
4017 pub fn is_trusted(&self) -> bool {
4018 match self.repository_state.peek() {
4019 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
4020 _ => false,
4021 }
4022 }
4023
    /// Returns a clone of the current repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
4027
    /// Iterates over all pending git operations recorded for this repository.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
4031
    /// Returns the aggregated summary over all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
4035
4036 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
4037 self.pending_ops
4038 .get(&PathKey(path.as_ref().clone()), ())
4039 .cloned()
4040 }
4041
    /// Constructs a repository entity backed by a local git checkout.
    ///
    /// The backend is initialized asynchronously: a shared task builds the
    /// `LocalRepositoryState`, the local git worker is spawned against it,
    /// and the repository starts from an empty snapshot until the first scan
    /// completes.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // Initialization errors are stringified so the shared future's
        // output stays cloneable.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        // Wrap the raw local state into the `RepositoryState` enum expected
        // by the rest of the repository machinery.
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data when the relevant parts of the
        // repository change. The `scan_id > 1` guard skips the initial scan,
        // when there is nothing cached yet to invalidate.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                if this.scan_id > 1 {
                    // Stash changes only affect the "all sources" log view.
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4115
4116 fn remote(
4117 id: RepositoryId,
4118 work_directory_abs_path: Arc<Path>,
4119 original_repo_abs_path: Option<Arc<Path>>,
4120 path_style: PathStyle,
4121 project_id: ProjectId,
4122 client: AnyProtoClient,
4123 git_store: WeakEntity<GitStore>,
4124 cx: &mut Context<Self>,
4125 ) -> Self {
4126 let snapshot = RepositorySnapshot::empty(
4127 id,
4128 work_directory_abs_path,
4129 original_repo_abs_path,
4130 path_style,
4131 );
4132 let repository_state = RemoteRepositoryState { project_id, client };
4133 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
4134 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
4135 Self {
4136 this: cx.weak_entity(),
4137 snapshot,
4138 commit_message_buffer: None,
4139 git_store,
4140 pending_ops: Default::default(),
4141 paths_needing_status_update: Default::default(),
4142 job_sender,
4143 repository_state,
4144 askpass_delegates: Default::default(),
4145 latest_askpass_id: 0,
4146 active_jobs: Default::default(),
4147 job_id: 0,
4148 initial_graph_data: Default::default(),
4149 commit_data: Default::default(),
4150 graph_commit_data_handler: GraphCommitHandlerState::Closed,
4151 }
4152 }
4153
    /// Returns the owning [`GitStore`], or `None` if it has been dropped.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4157
4158 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
4159 let this = cx.weak_entity();
4160 let git_store = self.git_store.clone();
4161 let _ = self.send_keyed_job(
4162 Some(GitJobKey::ReloadBufferDiffBases),
4163 None,
4164 |state, mut cx| async move {
4165 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
4166 log::error!("tried to recompute diffs for a non-local repository");
4167 return Ok(());
4168 };
4169
4170 let Some(this) = this.upgrade() else {
4171 return Ok(());
4172 };
4173
4174 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
4175 git_store.update(cx, |git_store, cx| {
4176 git_store
4177 .diffs
4178 .iter()
4179 .filter_map(|(buffer_id, diff_state)| {
4180 let buffer_store = git_store.buffer_store.read(cx);
4181 let buffer = buffer_store.get(*buffer_id)?;
4182 let file = File::from_dyn(buffer.read(cx).file())?;
4183 let abs_path = file.worktree.read(cx).absolutize(&file.path);
4184 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
4185 log::debug!(
4186 "start reload diff bases for repo path {}",
4187 repo_path.as_unix_str()
4188 );
4189 diff_state.update(cx, |diff_state, _| {
4190 let has_unstaged_diff = diff_state
4191 .unstaged_diff
4192 .as_ref()
4193 .is_some_and(|diff| diff.is_upgradable());
4194 let has_uncommitted_diff = diff_state
4195 .uncommitted_diff
4196 .as_ref()
4197 .is_some_and(|set| set.is_upgradable());
4198
4199 Some((
4200 buffer,
4201 repo_path,
4202 has_unstaged_diff.then(|| diff_state.index_text.clone()),
4203 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
4204 ))
4205 })
4206 })
4207 .collect::<Vec<_>>()
4208 })
4209 })?;
4210
4211 let buffer_diff_base_changes = cx
4212 .background_spawn(async move {
4213 let mut changes = Vec::new();
4214 for (buffer, repo_path, current_index_text, current_head_text) in
4215 &repo_diff_state_updates
4216 {
4217 let index_text = if current_index_text.is_some() {
4218 backend.load_index_text(repo_path.clone()).await
4219 } else {
4220 None
4221 };
4222 let head_text = if current_head_text.is_some() {
4223 backend.load_committed_text(repo_path.clone()).await
4224 } else {
4225 None
4226 };
4227
4228 let change =
4229 match (current_index_text.as_ref(), current_head_text.as_ref()) {
4230 (Some(current_index), Some(current_head)) => {
4231 let index_changed =
4232 index_text.as_deref() != current_index.as_deref();
4233 let head_changed =
4234 head_text.as_deref() != current_head.as_deref();
4235 if index_changed && head_changed {
4236 if index_text == head_text {
4237 Some(DiffBasesChange::SetBoth(head_text))
4238 } else {
4239 Some(DiffBasesChange::SetEach {
4240 index: index_text,
4241 head: head_text,
4242 })
4243 }
4244 } else if index_changed {
4245 Some(DiffBasesChange::SetIndex(index_text))
4246 } else if head_changed {
4247 Some(DiffBasesChange::SetHead(head_text))
4248 } else {
4249 None
4250 }
4251 }
4252 (Some(current_index), None) => {
4253 let index_changed =
4254 index_text.as_deref() != current_index.as_deref();
4255 index_changed
4256 .then_some(DiffBasesChange::SetIndex(index_text))
4257 }
4258 (None, Some(current_head)) => {
4259 let head_changed =
4260 head_text.as_deref() != current_head.as_deref();
4261 head_changed.then_some(DiffBasesChange::SetHead(head_text))
4262 }
4263 (None, None) => None,
4264 };
4265
4266 changes.push((buffer.clone(), change))
4267 }
4268 changes
4269 })
4270 .await;
4271
4272 git_store.update(&mut cx, |git_store, cx| {
4273 for (buffer, diff_bases_change) in buffer_diff_base_changes {
4274 let buffer_snapshot = buffer.read(cx).text_snapshot();
4275 let buffer_id = buffer_snapshot.remote_id();
4276 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
4277 continue;
4278 };
4279
4280 let downstream_client = git_store.downstream_client();
4281 diff_state.update(cx, |diff_state, cx| {
4282 use proto::update_diff_bases::Mode;
4283
4284 if let Some((diff_bases_change, (client, project_id))) =
4285 diff_bases_change.clone().zip(downstream_client)
4286 {
4287 let (staged_text, committed_text, mode) = match diff_bases_change {
4288 DiffBasesChange::SetIndex(index) => {
4289 (index, None, Mode::IndexOnly)
4290 }
4291 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
4292 DiffBasesChange::SetEach { index, head } => {
4293 (index, head, Mode::IndexAndHead)
4294 }
4295 DiffBasesChange::SetBoth(text) => {
4296 (None, text, Mode::IndexMatchesHead)
4297 }
4298 };
4299 client
4300 .send(proto::UpdateDiffBases {
4301 project_id: project_id.to_proto(),
4302 buffer_id: buffer_id.to_proto(),
4303 staged_text,
4304 committed_text,
4305 mode: mode as i32,
4306 })
4307 .log_err();
4308 }
4309
4310 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
4311 });
4312 }
4313 })
4314 },
4315 );
4316 }
4317
    /// Enqueues an un-keyed job on the repository's git worker.
    ///
    /// Convenience wrapper over [`Self::send_keyed_job`] with `key: None`.
    /// When `status` is `Some`, it is displayed as an active-job message while
    /// the job runs.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4330
    /// Enqueues `job` on the repository's serial git worker and returns a
    /// receiver for its result.
    ///
    /// `key` tags the job for the worker loop (NOTE(review): the coalescing
    /// semantics for same-key jobs live in the worker, not visible here —
    /// confirm there). When `status` is provided, the job is registered in
    /// `active_jobs` (with its start time) for the duration of its run so the
    /// UI can display progress. The receiver is canceled if the job is
    /// dropped without completing.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Monotonic id ties the active-jobs bookkeeping to this job.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job as active before it starts running.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Always deregister, even when no status was shown
                        // (removal of a missing key is a no-op).
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4380
4381 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4382 let Some(git_store) = self.git_store.upgrade() else {
4383 return;
4384 };
4385 let entity = cx.entity();
4386 git_store.update(cx, |git_store, cx| {
4387 let Some((&id, _)) = git_store
4388 .repositories
4389 .iter()
4390 .find(|(_, handle)| *handle == &entity)
4391 else {
4392 return;
4393 };
4394 git_store.active_repo_id = Some(id);
4395 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4396 });
4397 }
4398
    /// Iterates over the cached status entries in the current snapshot.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4402
    /// Returns the cached diff statistics for `path`, if present in the snapshot.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4406
    /// Returns a clone of the stash entries cached in the current snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4410
4411 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4412 let git_store = self.git_store.upgrade()?;
4413 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4414 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4415 let abs_path = SanitizedPath::new(&abs_path);
4416 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4417 Some(ProjectPath {
4418 worktree_id: worktree.read(cx).id(),
4419 path: relative_path,
4420 })
4421 }
4422
4423 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4424 let git_store = self.git_store.upgrade()?;
4425 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4426 let abs_path = worktree_store.absolutize(path, cx)?;
4427 self.snapshot.abs_path_to_repo_path(&abs_path)
4428 }
4429
4430 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4431 other
4432 .read(cx)
4433 .snapshot
4434 .work_directory_abs_path
4435 .starts_with(&self.snapshot.work_directory_abs_path)
4436 }
4437
4438 pub fn open_commit_buffer(
4439 &mut self,
4440 languages: Option<Arc<LanguageRegistry>>,
4441 buffer_store: Entity<BufferStore>,
4442 cx: &mut Context<Self>,
4443 ) -> Task<Result<Entity<Buffer>>> {
4444 let id = self.id;
4445 if let Some(buffer) = self.commit_message_buffer.clone() {
4446 return Task::ready(Ok(buffer));
4447 }
4448 let this = cx.weak_entity();
4449
4450 let rx = self.send_job(None, move |state, mut cx| async move {
4451 let Some(this) = this.upgrade() else {
4452 bail!("git store was dropped");
4453 };
4454 match state {
4455 RepositoryState::Local(..) => {
4456 this.update(&mut cx, |_, cx| {
4457 Self::open_local_commit_buffer(languages, buffer_store, cx)
4458 })
4459 .await
4460 }
4461 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4462 let request = client.request(proto::OpenCommitMessageBuffer {
4463 project_id: project_id.0,
4464 repository_id: id.to_proto(),
4465 });
4466 let response = request.await.context("requesting to open commit buffer")?;
4467 let buffer_id = BufferId::new(response.buffer_id)?;
4468 let buffer = buffer_store
4469 .update(&mut cx, |buffer_store, cx| {
4470 buffer_store.wait_for_remote_buffer(buffer_id, cx)
4471 })
4472 .await?;
4473 if let Some(language_registry) = languages {
4474 let git_commit_language =
4475 language_registry.language_for_name("Git Commit").await?;
4476 buffer.update(&mut cx, |buffer, cx| {
4477 buffer.set_language(Some(git_commit_language), cx);
4478 });
4479 }
4480 this.update(&mut cx, |this, _| {
4481 this.commit_message_buffer = Some(buffer.clone());
4482 });
4483 Ok(buffer)
4484 }
4485 }
4486 });
4487
4488 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
4489 }
4490
4491 fn open_local_commit_buffer(
4492 language_registry: Option<Arc<LanguageRegistry>>,
4493 buffer_store: Entity<BufferStore>,
4494 cx: &mut Context<Self>,
4495 ) -> Task<Result<Entity<Buffer>>> {
4496 cx.spawn(async move |repository, cx| {
4497 let git_commit_language = match language_registry {
4498 Some(language_registry) => {
4499 Some(language_registry.language_for_name("Git Commit").await?)
4500 }
4501 None => None,
4502 };
4503 let buffer = buffer_store
4504 .update(cx, |buffer_store, cx| {
4505 buffer_store.create_buffer(git_commit_language, false, cx)
4506 })
4507 .await?;
4508
4509 repository.update(cx, |repository, _| {
4510 repository.commit_message_buffer = Some(buffer.clone());
4511 })?;
4512 Ok(buffer)
4513 })
4514 }
4515
    /// Restores `paths` to their state at `commit`
    /// (`git checkout <commit> -- <paths>`), marking the files with a pending
    /// `Reverted` op while the operation runs.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        // Shown as the active-job status message.
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                // Local: delegate to the git backend.
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote: forward as an RPC to the host.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4570
    /// Resets HEAD to `commit` with the given mode.
    ///
    /// Local repositories call the backend directly; remote ones issue a
    /// `GitReset` RPC. Only `Soft` and `Mixed` are representable in the proto
    /// mapping below.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4604
    /// Loads the details (sha, message, timestamp, author) of `commit` —
    /// locally via the backend, remotely via a `GitShow` RPC.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4632
    /// Loads the full diff of `commit` (per-file old/new texts) — locally via
    /// the backend, remotely via a `LoadCommitDiff` RPC.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        // Proto paths are re-validated; a bad path fails the
                        // whole request.
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4668
    /// Loads the complete history of `path` (no pagination: skip 0, no limit).
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4675
    /// Loads a page of the history of `path`, skipping the first `skip`
    /// entries and returning at most `limit` (all remaining when `None`) —
    /// locally via the backend, remotely via a `GitFileHistory` RPC.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4717
    /// Returns the cached initial graph data for `(log_source, log_order)`,
    /// if a fetch has previously been started for that combination.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4725
4726 pub fn search_commits(
4727 &mut self,
4728 log_source: LogSource,
4729 search_args: SearchCommitArgs,
4730 request_tx: smol::channel::Sender<Oid>,
4731 cx: &mut Context<Self>,
4732 ) {
4733 let repository_state = self.repository_state.clone();
4734
4735 cx.background_spawn(async move {
4736 let repo_state = repository_state.await;
4737
4738 match repo_state {
4739 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4740 backend
4741 .search_commits(log_source, search_args, request_tx)
4742 .await
4743 .log_err();
4744 }
4745 Ok(RepositoryState::Remote(_)) => {}
4746 Err(_) => {}
4747 };
4748 })
4749 .detach();
4750 }
4751
    /// Returns the currently-loaded slice of graph commits covering `range`,
    /// starting a background fetch for `(log_source, log_order)` on first use.
    ///
    /// The response also reports whether loading is still in progress and any
    /// error recorded by the fetch task.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        // First request for this (source, order) pair kicks off the fetch;
        // later requests just read whatever has streamed in so far.
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record the failure on the cached entry so callers can
                    // surface it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the data loaded so far.
        // NOTE(review): `range.start` clamps to `len - 1`, so a start at or
        // past the end still yields the final commit — confirm intended.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4821
    /// Streams initial graph commit data from the local git backend into the
    /// repository's `initial_graph_data` cache for `(log_source, log_order)`.
    ///
    /// The backend writes batches into an unbounded channel from a background
    /// task; each received batch is appended to the cached entry (with an
    /// oid → index map kept in sync) and a `CountUpdated` event is emitted.
    /// Returns the backend task's error, if any, once the stream ends.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches until the backend drops its sender.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                // Only modify an existing entry; this task is owned by the
                // entry, so a vacant slot indicates a bookkeeping bug.
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Surface any error produced by the backend task.
        task.await?;
        Ok(())
    }
4875
    /// Returns the cached state of commit `sha`, requesting a load on a cache
    /// miss.
    ///
    /// On a miss: if the commit-data handler is open, the sha is queued and a
    /// `Loading` placeholder inserted; if closed, the handler is (re)started
    /// (the request is expected to be retried on a later call); if starting,
    /// nothing is done yet. Always returns `Loading` until data arrives.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark as loading if the request was actually queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4896
    /// Spins up the commit-data handler: a background reader that serves
    /// queued commit-data requests, plus a foreground task that stores results
    /// in `commit_data` and notifies observers.
    ///
    /// The background loop exits after 10 seconds of inactivity (or when
    /// either channel closes); the foreground task then marks the handler
    /// `Closed` so a later request can restart it. Only supported for local
    /// repositories.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Bounded results channel applies backpressure to the reader;
        // unbounded request channel lets callers queue freely.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Results channel closed (idle timeout or error): allow a
            // subsequent request to restart the handler.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Fresh idle timer per iteration; firing first ends the loop.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Failed reads are logged but don't kill the
                                // handler.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the results channel lets the foreground task finish and
            // mark the handler Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4987
    /// Returns the project's [`BufferStore`], or `None` if the git store is gone.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4991
    /// Returns save tasks for every open buffer among `entries` that still
    /// exists on disk and has unsaved edits.
    ///
    /// Paths with no corresponding project path or open buffer are skipped.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    // Only save buffers whose file still exists on disk and
                    // that actually have dirty edits.
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
5018
    /// Stages the given paths (`git add`); see [`Self::stage_or_unstage_entries`].
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
5026
    /// Unstages the given paths (`git reset`); see [`Self::stage_or_unstage_entries`].
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
5034
    /// Stages (`stage == true`) or unstages the given paths.
    ///
    /// Dirty buffers for the paths are saved first. The index write runs as a
    /// keyed job (`GitJobKey::WriteIndex`) with pending-op tracking, and the
    /// corresponding uncommitted diffs are optimistically updated so hunk
    /// staging state appears immediately in the UI; on failure the pending
    /// hunks are cleared again.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line shown while the job runs.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush unsaved edits before touching the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip all hunks in each affected
                            // uncommitted diff, recording per-diff operation
                            // counts so the write below can be correlated with
                            // these edits.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write: backend call for
                            // local repositories, RPC for remote ones.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic hunk edits: commit the
                            // operation counts on success, or clear pending
                            // hunks on failure.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5219
5220 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5221 let snapshot = self.snapshot.clone();
5222 let pending_ops = self.pending_ops.clone();
5223 let to_stage = cx.background_spawn(async move {
5224 snapshot
5225 .status()
5226 .filter_map(|entry| {
5227 if let Some(ops) =
5228 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5229 {
5230 if ops.staging() || ops.staged() {
5231 None
5232 } else {
5233 Some(entry.repo_path)
5234 }
5235 } else if entry.status.staging().is_fully_staged() {
5236 None
5237 } else {
5238 Some(entry.repo_path)
5239 }
5240 })
5241 .collect()
5242 });
5243
5244 cx.spawn(async move |this, cx| {
5245 let to_stage = to_stage.await;
5246 this.update(cx, |this, cx| {
5247 this.stage_or_unstage_entries(true, to_stage, cx)
5248 })?
5249 .await
5250 })
5251 }
5252
5253 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5254 let snapshot = self.snapshot.clone();
5255 let pending_ops = self.pending_ops.clone();
5256 let to_unstage = cx.background_spawn(async move {
5257 snapshot
5258 .status()
5259 .filter_map(|entry| {
5260 if let Some(ops) =
5261 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5262 {
5263 if !ops.staging() && !ops.staged() {
5264 None
5265 } else {
5266 Some(entry.repo_path)
5267 }
5268 } else if entry.status.staging().is_fully_unstaged() {
5269 None
5270 } else {
5271 Some(entry.repo_path)
5272 }
5273 })
5274 .collect()
5275 });
5276
5277 cx.spawn(async move |this, cx| {
5278 let to_unstage = to_unstage.await;
5279 this.update(cx, |this, cx| {
5280 this.stage_or_unstage_entries(false, to_unstage, cx)
5281 })?
5282 .await
5283 })
5284 }
5285
    /// Stashes every path that currently has a status entry.
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5291
    /// Stashes the given paths, via the local git backend or a
    /// `proto::Stash` RPC when the repository lives on a remote host.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            // Queue the work on the repository's serial job queue.
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // Double `?`: the outer one for the job's result channel, the
            // inner one for the git operation itself.
            .await??;
            Ok(())
        })
    }
5328
    /// Pops a stash entry (apply and remove). `index` selects the entry;
    /// `None` lets the backend pick its default entry.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            // Queue the work on the repository's serial job queue.
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // Double `?`: the outer one for the job's result channel, the
            // inner one for the git operation itself.
            .await??;
            Ok(())
        })
    }
5362
    /// Applies a stash entry without removing it from the stash list.
    /// `index` selects the entry; `None` lets the backend pick its default.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            // Queue the work on the repository's serial job queue.
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // Double `?`: the outer one for the job's result channel, the
            // inner one for the git operation itself.
            .await??;
            Ok(())
        })
    }
5396
5397 pub fn stash_drop(
5398 &mut self,
5399 index: Option<usize>,
5400 cx: &mut Context<Self>,
5401 ) -> oneshot::Receiver<anyhow::Result<()>> {
5402 let id = self.id;
5403 let updates_tx = self
5404 .git_store()
5405 .and_then(|git_store| match &git_store.read(cx).state {
5406 GitStoreState::Local { downstream, .. } => downstream
5407 .as_ref()
5408 .map(|downstream| downstream.updates_tx.clone()),
5409 _ => None,
5410 });
5411 let this = cx.weak_entity();
5412 self.send_job(None, move |git_repo, mut cx| async move {
5413 match git_repo {
5414 RepositoryState::Local(LocalRepositoryState {
5415 backend,
5416 environment,
5417 ..
5418 }) => {
5419 // TODO would be nice to not have to do this manually
5420 let result = backend.stash_drop(index, environment).await;
5421 if result.is_ok()
5422 && let Ok(stash_entries) = backend.stash_entries().await
5423 {
5424 let snapshot = this.update(&mut cx, |this, cx| {
5425 this.snapshot.stash_entries = stash_entries;
5426 cx.emit(RepositoryEvent::StashEntriesChanged);
5427 this.snapshot.clone()
5428 })?;
5429 if let Some(updates_tx) = updates_tx {
5430 updates_tx
5431 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5432 .ok();
5433 }
5434 }
5435
5436 result
5437 }
5438 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5439 client
5440 .request(proto::StashDrop {
5441 project_id: project_id.0,
5442 repository_id: id.to_proto(),
5443 stash_index: index.map(|i| i as u64),
5444 })
5445 .await
5446 .context("sending stash pop request")?;
5447 Ok(())
5448 }
5449 }
5450 })
5451 }
5452
5453 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5454 let id = self.id;
5455 self.send_job(
5456 Some(format!("git hook {}", hook.as_str()).into()),
5457 move |git_repo, _cx| async move {
5458 match git_repo {
5459 RepositoryState::Local(LocalRepositoryState {
5460 backend,
5461 environment,
5462 ..
5463 }) => backend.run_hook(hook, environment.clone()).await,
5464 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5465 client
5466 .request(proto::RunGitHook {
5467 project_id: project_id.0,
5468 repository_id: id.to_proto(),
5469 hook: hook.to_proto(),
5470 })
5471 .await?;
5472
5473 Ok(())
5474 }
5475 }
5476 },
5477 )
5478 }
5479
    /// Commits staged changes with the given `message`.
    ///
    /// The pre-commit hook runs first and the commit is aborted if it fails.
    /// `name_and_email`, when present, overrides the author identity.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook now; its outcome is awaited inside the
        // commit job below, so the hook finishes before the commit runs.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the hook job was dropped or the hook itself failed.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate so the host can route
                    // credential prompts back via `askpass_id`.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        // Always deregister the delegate when the request ends.
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5535
    /// Fetches from a remote, returning the command's stdout/stderr output.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate so the host can route
                    // credential prompts back via `askpass_id`.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        // Always deregister the delegate when the request ends.
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5577
5578 pub fn push(
5579 &mut self,
5580 branch: SharedString,
5581 remote_branch: SharedString,
5582 remote: SharedString,
5583 options: Option<PushOptions>,
5584 askpass: AskPassDelegate,
5585 cx: &mut Context<Self>,
5586 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5587 let askpass_delegates = self.askpass_delegates.clone();
5588 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5589 let id = self.id;
5590
5591 let args = options
5592 .map(|option| match option {
5593 PushOptions::SetUpstream => " --set-upstream",
5594 PushOptions::Force => " --force-with-lease",
5595 })
5596 .unwrap_or("");
5597
5598 let updates_tx = self
5599 .git_store()
5600 .and_then(|git_store| match &git_store.read(cx).state {
5601 GitStoreState::Local { downstream, .. } => downstream
5602 .as_ref()
5603 .map(|downstream| downstream.updates_tx.clone()),
5604 _ => None,
5605 });
5606
5607 let this = cx.weak_entity();
5608 self.send_job(
5609 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5610 move |git_repo, mut cx| async move {
5611 match git_repo {
5612 RepositoryState::Local(LocalRepositoryState {
5613 backend,
5614 environment,
5615 ..
5616 }) => {
5617 let result = backend
5618 .push(
5619 branch.to_string(),
5620 remote_branch.to_string(),
5621 remote.to_string(),
5622 options,
5623 askpass,
5624 environment.clone(),
5625 cx.clone(),
5626 )
5627 .await;
5628 // TODO would be nice to not have to do this manually
5629 if result.is_ok() {
5630 let branches = backend.branches().await?;
5631 let branch = branches.into_iter().find(|branch| branch.is_head);
5632 log::info!("head branch after scan is {branch:?}");
5633 let snapshot = this.update(&mut cx, |this, cx| {
5634 this.snapshot.branch = branch;
5635 cx.emit(RepositoryEvent::HeadChanged);
5636 this.snapshot.clone()
5637 })?;
5638 if let Some(updates_tx) = updates_tx {
5639 updates_tx
5640 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5641 .ok();
5642 }
5643 }
5644 result
5645 }
5646 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5647 askpass_delegates.lock().insert(askpass_id, askpass);
5648 let _defer = util::defer(|| {
5649 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5650 debug_assert!(askpass_delegate.is_some());
5651 });
5652 let response = client
5653 .request(proto::Push {
5654 project_id: project_id.0,
5655 repository_id: id.to_proto(),
5656 askpass_id,
5657 branch_name: branch.to_string(),
5658 remote_branch_name: remote_branch.to_string(),
5659 remote_name: remote.to_string(),
5660 options: options.map(|options| match options {
5661 PushOptions::Force => proto::push::PushOptions::Force,
5662 PushOptions::SetUpstream => {
5663 proto::push::PushOptions::SetUpstream
5664 }
5665 }
5666 as i32),
5667 })
5668 .await?;
5669
5670 Ok(RemoteCommandOutput {
5671 stdout: response.stdout,
5672 stderr: response.stderr,
5673 })
5674 }
5675 }
5676 },
5677 )
5678 }
5679
5680 pub fn pull(
5681 &mut self,
5682 branch: Option<SharedString>,
5683 remote: SharedString,
5684 rebase: bool,
5685 askpass: AskPassDelegate,
5686 _cx: &mut App,
5687 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5688 let askpass_delegates = self.askpass_delegates.clone();
5689 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5690 let id = self.id;
5691
5692 let mut status = "git pull".to_string();
5693 if rebase {
5694 status.push_str(" --rebase");
5695 }
5696 status.push_str(&format!(" {}", remote));
5697 if let Some(b) = &branch {
5698 status.push_str(&format!(" {}", b));
5699 }
5700
5701 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5702 match git_repo {
5703 RepositoryState::Local(LocalRepositoryState {
5704 backend,
5705 environment,
5706 ..
5707 }) => {
5708 backend
5709 .pull(
5710 branch.as_ref().map(|b| b.to_string()),
5711 remote.to_string(),
5712 rebase,
5713 askpass,
5714 environment.clone(),
5715 cx,
5716 )
5717 .await
5718 }
5719 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5720 askpass_delegates.lock().insert(askpass_id, askpass);
5721 let _defer = util::defer(|| {
5722 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5723 debug_assert!(askpass_delegate.is_some());
5724 });
5725 let response = client
5726 .request(proto::Pull {
5727 project_id: project_id.0,
5728 repository_id: id.to_proto(),
5729 askpass_id,
5730 rebase,
5731 branch_name: branch.as_ref().map(|b| b.to_string()),
5732 remote_name: remote.to_string(),
5733 })
5734 .await?;
5735
5736 Ok(RemoteCommandOutput {
5737 stdout: response.stdout,
5738 stderr: response.stderr,
5739 })
5740 }
5741 }
5742 })
5743 }
5744
    /// Writes `content` as the new index (staged) text for `path`.
    /// NOTE(review): a `None` content presumably removes the staged text —
    /// confirm against the backend's `set_index_text`.
    ///
    /// When `hunk_staging_operation_count` is present, it is recorded on the
    /// buffer's diff state after the write completes.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        // Keyed on the path so that index writes for the same file go through
        // the same job key; see `send_keyed_job` for the exact semantics.
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit from the file on disk;
                        // missing or unreadable files count as non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Find the open buffer (if any) for this repo path and
                    // record the operation count as of this write on its diff
                    // state. `?` inside the closure just bails out of the
                    // bookkeeping when any lookup fails.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5823
5824 pub fn create_remote(
5825 &mut self,
5826 remote_name: String,
5827 remote_url: String,
5828 ) -> oneshot::Receiver<Result<()>> {
5829 let id = self.id;
5830 self.send_job(
5831 Some(format!("git remote add {remote_name} {remote_url}").into()),
5832 move |repo, _cx| async move {
5833 match repo {
5834 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5835 backend.create_remote(remote_name, remote_url).await
5836 }
5837 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5838 client
5839 .request(proto::GitCreateRemote {
5840 project_id: project_id.0,
5841 repository_id: id.to_proto(),
5842 remote_name,
5843 remote_url,
5844 })
5845 .await?;
5846
5847 Ok(())
5848 }
5849 }
5850 },
5851 )
5852 }
5853
5854 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5855 let id = self.id;
5856 self.send_job(
5857 Some(format!("git remove remote {remote_name}").into()),
5858 move |repo, _cx| async move {
5859 match repo {
5860 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5861 backend.remove_remote(remote_name).await
5862 }
5863 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5864 client
5865 .request(proto::GitRemoveRemote {
5866 project_id: project_id.0,
5867 repository_id: id.to_proto(),
5868 remote_name,
5869 })
5870 .await?;
5871
5872 Ok(())
5873 }
5874 }
5875 },
5876 )
5877 }
5878
5879 pub fn get_remotes(
5880 &mut self,
5881 branch_name: Option<String>,
5882 is_push: bool,
5883 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5884 let id = self.id;
5885 self.send_job(None, move |repo, _cx| async move {
5886 match repo {
5887 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5888 let remote = if let Some(branch_name) = branch_name {
5889 if is_push {
5890 backend.get_push_remote(branch_name).await?
5891 } else {
5892 backend.get_branch_remote(branch_name).await?
5893 }
5894 } else {
5895 None
5896 };
5897
5898 match remote {
5899 Some(remote) => Ok(vec![remote]),
5900 None => backend.get_all_remotes().await,
5901 }
5902 }
5903 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5904 let response = client
5905 .request(proto::GetRemotes {
5906 project_id: project_id.0,
5907 repository_id: id.to_proto(),
5908 branch_name,
5909 is_push,
5910 })
5911 .await?;
5912
5913 let remotes = response
5914 .remotes
5915 .into_iter()
5916 .map(|remotes| Remote {
5917 name: remotes.name.into(),
5918 })
5919 .collect();
5920
5921 Ok(remotes)
5922 }
5923 }
5924 })
5925 }
5926
5927 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5928 let id = self.id;
5929 self.send_job(None, move |repo, _| async move {
5930 match repo {
5931 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5932 backend.branches().await
5933 }
5934 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5935 let response = client
5936 .request(proto::GitGetBranches {
5937 project_id: project_id.0,
5938 repository_id: id.to_proto(),
5939 })
5940 .await?;
5941
5942 let branches = response
5943 .branches
5944 .into_iter()
5945 .map(|branch| proto_to_branch(&branch))
5946 .collect();
5947
5948 Ok(branches)
5949 }
5950 }
5951 })
5952 }
5953
5954 /// If this is a linked worktree (*NOT* the main checkout of a repository),
5955 /// returns the pathed for the linked worktree.
5956 ///
5957 /// Returns None if this is the main checkout.
5958 pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
5959 if self.work_directory_abs_path != self.original_repo_abs_path {
5960 Some(&self.work_directory_abs_path)
5961 } else {
5962 None
5963 }
5964 }
5965
5966 pub fn path_for_new_linked_worktree(
5967 &self,
5968 branch_name: &str,
5969 worktree_directory_setting: &str,
5970 ) -> Result<PathBuf> {
5971 let original_repo = self.original_repo_abs_path.clone();
5972 let project_name = original_repo
5973 .file_name()
5974 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5975 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5976 Ok(directory.join(branch_name).join(project_name))
5977 }
5978
5979 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5980 let id = self.id;
5981 self.send_job(None, move |repo, _| async move {
5982 match repo {
5983 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5984 backend.worktrees().await
5985 }
5986 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5987 let response = client
5988 .request(proto::GitGetWorktrees {
5989 project_id: project_id.0,
5990 repository_id: id.to_proto(),
5991 })
5992 .await?;
5993
5994 let worktrees = response
5995 .worktrees
5996 .into_iter()
5997 .map(|worktree| proto_to_worktree(&worktree))
5998 .collect();
5999
6000 Ok(worktrees)
6001 }
6002 }
6003 })
6004 }
6005
    /// Creates a new git worktree at `path` for the given `target` (an
    /// existing branch, a new branch, or a detached checkout).
    pub fn create_worktree(
        &mut self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let job_description = match target.branch_name() {
            Some(branch_name) => format!("git worktree add: {branch_name}"),
            None => "git worktree add (detached)".to_string(),
        };
        self.send_job(Some(job_description.into()), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_worktree(target, path).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Flatten the target enum into the proto request's
                    // (name, commit, use_existing_branch) fields.
                    let (name, commit, use_existing_branch) = match target {
                        CreateWorktreeTarget::ExistingBranch { branch_name } => {
                            (branch_name, None, true)
                        }
                        CreateWorktreeTarget::NewBranch {
                            branch_name,
                            base_sha: start_point,
                        } => (branch_name, start_point, false),
                        // Detached checkouts have no branch name.
                        // NOTE(review): the proto has no explicit "detached"
                        // flag; the empty name presumably signals it — confirm
                        // the server-side handling.
                        CreateWorktreeTarget::Detached {
                            base_sha: start_point,
                        } => (String::new(), start_point, false),
                    };

                    client
                        .request(proto::GitCreateWorktree {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            name,
                            directory: path.to_string_lossy().to_string(),
                            commit,
                            use_existing_branch,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6051
6052 pub fn create_worktree_detached(
6053 &mut self,
6054 path: PathBuf,
6055 commit: String,
6056 ) -> oneshot::Receiver<Result<()>> {
6057 self.create_worktree(
6058 CreateWorktreeTarget::Detached {
6059 base_sha: Some(commit),
6060 },
6061 path,
6062 )
6063 }
6064
6065 pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
6066 let id = self.id;
6067 self.send_job(None, move |repo, _cx| async move {
6068 match repo {
6069 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6070 Ok(backend.head_sha().await)
6071 }
6072 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6073 let response = client
6074 .request(proto::GitGetHeadSha {
6075 project_id: project_id.0,
6076 repository_id: id.to_proto(),
6077 })
6078 .await?;
6079
6080 Ok(response.sha)
6081 }
6082 }
6083 })
6084 }
6085
6086 pub fn update_ref(
6087 &mut self,
6088 ref_name: String,
6089 commit: String,
6090 ) -> oneshot::Receiver<Result<()>> {
6091 self.send_job(None, move |repo, _cx| async move {
6092 match repo {
6093 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6094 backend.update_ref(ref_name, commit).await
6095 }
6096 RepositoryState::Remote(_) => {
6097 anyhow::bail!("update_ref is not supported for remote repositories")
6098 }
6099 }
6100 })
6101 }
6102
6103 pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
6104 self.send_job(None, move |repo, _cx| async move {
6105 match repo {
6106 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6107 backend.delete_ref(ref_name).await
6108 }
6109 RepositoryState::Remote(_) => {
6110 anyhow::bail!("delete_ref is not supported for remote repositories")
6111 }
6112 }
6113 })
6114 }
6115
6116 pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
6117 self.send_job(None, move |repo, _cx| async move {
6118 match repo {
6119 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6120 backend.repair_worktrees().await
6121 }
6122 RepositoryState::Remote(_) => {
6123 anyhow::bail!("repair_worktrees is not supported for remote repositories")
6124 }
6125 }
6126 })
6127 }
6128
6129 pub fn commit_exists(&mut self, sha: String) -> oneshot::Receiver<Result<bool>> {
6130 self.send_job(None, move |repo, _cx| async move {
6131 match repo {
6132 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6133 let results = backend.revparse_batch(vec![sha]).await?;
6134 Ok(results.into_iter().next().flatten().is_some())
6135 }
6136 RepositoryState::Remote(_) => {
6137 anyhow::bail!("commit_exists is not supported for remote repositories")
6138 }
6139 }
6140 })
6141 }
6142
6143 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
6144 let id = self.id;
6145 self.send_job(
6146 Some(format!("git worktree remove: {}", path.display()).into()),
6147 move |repo, _cx| async move {
6148 match repo {
6149 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6150 backend.remove_worktree(path, force).await
6151 }
6152 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6153 client
6154 .request(proto::GitRemoveWorktree {
6155 project_id: project_id.0,
6156 repository_id: id.to_proto(),
6157 path: path.to_string_lossy().to_string(),
6158 force,
6159 })
6160 .await?;
6161
6162 Ok(())
6163 }
6164 }
6165 },
6166 )
6167 }
6168
6169 pub fn rename_worktree(
6170 &mut self,
6171 old_path: PathBuf,
6172 new_path: PathBuf,
6173 ) -> oneshot::Receiver<Result<()>> {
6174 let id = self.id;
6175 self.send_job(
6176 Some(format!("git worktree move: {}", old_path.display()).into()),
6177 move |repo, _cx| async move {
6178 match repo {
6179 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6180 backend.rename_worktree(old_path, new_path).await
6181 }
6182 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6183 client
6184 .request(proto::GitRenameWorktree {
6185 project_id: project_id.0,
6186 repository_id: id.to_proto(),
6187 old_path: old_path.to_string_lossy().to_string(),
6188 new_path: new_path.to_string_lossy().to_string(),
6189 })
6190 .await?;
6191
6192 Ok(())
6193 }
6194 }
6195 },
6196 )
6197 }
6198
6199 pub fn default_branch(
6200 &mut self,
6201 include_remote_name: bool,
6202 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
6203 let id = self.id;
6204 self.send_job(None, move |repo, _| async move {
6205 match repo {
6206 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6207 backend.default_branch(include_remote_name).await
6208 }
6209 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6210 let response = client
6211 .request(proto::GetDefaultBranch {
6212 project_id: project_id.0,
6213 repository_id: id.to_proto(),
6214 })
6215 .await?;
6216
6217 anyhow::Ok(response.branch.map(SharedString::from))
6218 }
6219 }
6220 })
6221 }
6222
    /// Computes a tree-level diff (per-path add/modify/delete statuses) for
    /// the given `diff_type`.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Convert proto entries back into `TreeDiffStatus`.
                    // Entries whose oid or path fail to parse are logged
                    // (via `log_err`) and skipped rather than failing the
                    // whole diff. Only Modified/Deleted carry an old oid.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6282
6283 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6284 let id = self.id;
6285 self.send_job(None, move |repo, _cx| async move {
6286 match repo {
6287 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6288 backend.diff(diff_type).await
6289 }
6290 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6291 let (proto_diff_type, merge_base_ref) = match &diff_type {
6292 DiffType::HeadToIndex => {
6293 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6294 }
6295 DiffType::HeadToWorktree => {
6296 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6297 }
6298 DiffType::MergeBase { base_ref } => (
6299 proto::git_diff::DiffType::MergeBase.into(),
6300 Some(base_ref.to_string()),
6301 ),
6302 };
6303 let response = client
6304 .request(proto::GitDiff {
6305 project_id: project_id.0,
6306 repository_id: id.to_proto(),
6307 diff_type: proto_diff_type,
6308 merge_base_ref,
6309 })
6310 .await?;
6311
6312 Ok(response.diff)
6313 }
6314 }
6315 })
6316 }
6317
    /// Creates (and switches to) a new branch named `branch_name`,
    /// optionally based on `base_branch`.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `base_branch` is not forwarded here, so
                    // remote branch creation ignores the requested base —
                    // confirm whether proto::GitCreateBranch supports one.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6348
6349 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6350 let id = self.id;
6351 self.send_job(
6352 Some(format!("git switch {branch_name}").into()),
6353 move |repo, _cx| async move {
6354 match repo {
6355 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6356 backend.change_branch(branch_name).await
6357 }
6358 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6359 client
6360 .request(proto::GitChangeBranch {
6361 project_id: project_id.0,
6362 repository_id: id.to_proto(),
6363 branch_name,
6364 })
6365 .await?;
6366
6367 Ok(())
6368 }
6369 }
6370 },
6371 )
6372 }
6373
6374 pub fn delete_branch(
6375 &mut self,
6376 is_remote: bool,
6377 branch_name: String,
6378 ) -> oneshot::Receiver<Result<()>> {
6379 let id = self.id;
6380 self.send_job(
6381 Some(
6382 format!(
6383 "git branch {} {}",
6384 if is_remote { "-dr" } else { "-d" },
6385 branch_name
6386 )
6387 .into(),
6388 ),
6389 move |repo, _cx| async move {
6390 match repo {
6391 RepositoryState::Local(state) => {
6392 state.backend.delete_branch(is_remote, branch_name).await
6393 }
6394 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6395 client
6396 .request(proto::GitDeleteBranch {
6397 project_id: project_id.0,
6398 repository_id: id.to_proto(),
6399 is_remote,
6400 branch_name,
6401 })
6402 .await?;
6403
6404 Ok(())
6405 }
6406 }
6407 },
6408 )
6409 }
6410
6411 pub fn rename_branch(
6412 &mut self,
6413 branch: String,
6414 new_name: String,
6415 ) -> oneshot::Receiver<Result<()>> {
6416 let id = self.id;
6417 self.send_job(
6418 Some(format!("git branch -m {branch} {new_name}").into()),
6419 move |repo, _cx| async move {
6420 match repo {
6421 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6422 backend.rename_branch(branch, new_name).await
6423 }
6424 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6425 client
6426 .request(proto::GitRenameBranch {
6427 project_id: project_id.0,
6428 repository_id: id.to_proto(),
6429 branch,
6430 new_name,
6431 })
6432 .await?;
6433
6434 Ok(())
6435 }
6436 }
6437 },
6438 )
6439 }
6440
6441 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6442 let id = self.id;
6443 self.send_job(None, move |repo, _cx| async move {
6444 match repo {
6445 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6446 backend.check_for_pushed_commit().await
6447 }
6448 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6449 let response = client
6450 .request(proto::CheckForPushedCommits {
6451 project_id: project_id.0,
6452 repository_id: id.to_proto(),
6453 })
6454 .await?;
6455
6456 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6457
6458 Ok(branches)
6459 }
6460 }
6461 })
6462 }
6463
6464 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6465 let id = self.id;
6466 self.send_job(None, move |repo, _cx| async move {
6467 match repo {
6468 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6469 backend.checkpoint().await
6470 }
6471 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6472 let response = client
6473 .request(proto::GitCreateCheckpoint {
6474 project_id: project_id.0,
6475 repository_id: id.to_proto(),
6476 })
6477 .await?;
6478
6479 Ok(GitRepositoryCheckpoint {
6480 commit_sha: Oid::from_bytes(&response.commit_sha)?,
6481 })
6482 }
6483 }
6484 })
6485 }
6486
6487 pub fn restore_checkpoint(
6488 &mut self,
6489 checkpoint: GitRepositoryCheckpoint,
6490 ) -> oneshot::Receiver<Result<()>> {
6491 let id = self.id;
6492 self.send_job(None, move |repo, _cx| async move {
6493 match repo {
6494 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6495 backend.restore_checkpoint(checkpoint).await
6496 }
6497 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6498 client
6499 .request(proto::GitRestoreCheckpoint {
6500 project_id: project_id.0,
6501 repository_id: id.to_proto(),
6502 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6503 })
6504 .await?;
6505 Ok(())
6506 }
6507 }
6508 })
6509 }
6510
    /// Applies a repository update received from the upstream (host) side of a
    /// collaboration session to this replica's snapshot, emitting a
    /// `RepositoryEvent` for each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch and HEAD commit: a single HeadChanged covers either differing.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to deserialize are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // File statuses arrive as a delta — removed paths plus inserted/updated
        // entries — applied to the `statuses_by_path` sum tree in one edit.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only the final message of an update batch carries the new scan id.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6594
6595 pub fn compare_checkpoints(
6596 &mut self,
6597 left: GitRepositoryCheckpoint,
6598 right: GitRepositoryCheckpoint,
6599 ) -> oneshot::Receiver<Result<bool>> {
6600 let id = self.id;
6601 self.send_job(None, move |repo, _cx| async move {
6602 match repo {
6603 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6604 backend.compare_checkpoints(left, right).await
6605 }
6606 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6607 let response = client
6608 .request(proto::GitCompareCheckpoints {
6609 project_id: project_id.0,
6610 repository_id: id.to_proto(),
6611 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6612 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6613 })
6614 .await?;
6615 Ok(response.equal)
6616 }
6617 }
6618 })
6619 }
6620
6621 pub fn diff_checkpoints(
6622 &mut self,
6623 base_checkpoint: GitRepositoryCheckpoint,
6624 target_checkpoint: GitRepositoryCheckpoint,
6625 ) -> oneshot::Receiver<Result<String>> {
6626 let id = self.id;
6627 self.send_job(None, move |repo, _cx| async move {
6628 match repo {
6629 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6630 backend
6631 .diff_checkpoints(base_checkpoint, target_checkpoint)
6632 .await
6633 }
6634 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6635 let response = client
6636 .request(proto::GitDiffCheckpoints {
6637 project_id: project_id.0,
6638 repository_id: id.to_proto(),
6639 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
6640 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
6641 })
6642 .await?;
6643 Ok(response.diff)
6644 }
6645 }
6646 })
6647 }
6648
6649 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
6650 let updated = SumTree::from_iter(
6651 self.pending_ops.iter().filter_map(|ops| {
6652 let inner_ops: Vec<PendingOp> =
6653 ops.ops.iter().filter(|op| op.running()).cloned().collect();
6654 if inner_ops.is_empty() {
6655 None
6656 } else {
6657 Some(PendingOps {
6658 repo_path: ops.repo_path.clone(),
6659 ops: inner_ops,
6660 })
6661 }
6662 }),
6663 (),
6664 );
6665
6666 if updated != self.pending_ops {
6667 cx.emit(RepositoryEvent::PendingOpsChanged {
6668 pending_ops: self.pending_ops.clone(),
6669 })
6670 }
6671
6672 self.pending_ops = updated;
6673 }
6674
    /// Schedules a full git state rescan on the job worker. The job is keyed
    /// with `ReloadGitState`, so redundant queued scans collapse into one.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // If the repository entity was dropped, there is nothing to do.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans can only run against a local repository backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // Prune completed pending ops now that fresh state exists.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                // Forward the fresh snapshot to downstream collaborators, if any.
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6706
    /// Spawns the background loop that owns the local repository state and
    /// executes queued `GitJob`s one at a time, returning the sender used to
    /// enqueue jobs. The loop exits when all senders are dropped.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued without blocking, so the
                // keyed-job de-duplication below can see the whole backlog.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // If a later job in the queue carries the same key, skip
                    // this one — only the most recently enqueued keyed job runs.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue is empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6752
    /// Spawns the background loop that forwards queued `GitJob`s to a remote
    /// (collaboration host) repository, returning the sender used to enqueue
    /// jobs. Mirrors `spawn_local_git_worker`, minus backend initialization.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued without blocking, so the
                // keyed-job de-duplication below can see the whole backlog.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // If a later job in the queue carries the same key, skip
                    // this one — only the most recently enqueued keyed job runs.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue is empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6788
6789 fn load_staged_text(
6790 &mut self,
6791 buffer_id: BufferId,
6792 repo_path: RepoPath,
6793 cx: &App,
6794 ) -> Task<Result<Option<String>>> {
6795 let rx = self.send_job(None, move |state, _| async move {
6796 match state {
6797 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6798 anyhow::Ok(backend.load_index_text(repo_path).await)
6799 }
6800 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6801 let response = client
6802 .request(proto::OpenUnstagedDiff {
6803 project_id: project_id.to_proto(),
6804 buffer_id: buffer_id.to_proto(),
6805 })
6806 .await?;
6807 Ok(response.staged_text)
6808 }
6809 }
6810 });
6811 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6812 }
6813
6814 fn load_committed_text(
6815 &mut self,
6816 buffer_id: BufferId,
6817 repo_path: RepoPath,
6818 cx: &App,
6819 ) -> Task<Result<DiffBasesChange>> {
6820 let rx = self.send_job(None, move |state, _| async move {
6821 match state {
6822 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6823 let committed_text = backend.load_committed_text(repo_path.clone()).await;
6824 let staged_text = backend.load_index_text(repo_path).await;
6825 let diff_bases_change = if committed_text == staged_text {
6826 DiffBasesChange::SetBoth(committed_text)
6827 } else {
6828 DiffBasesChange::SetEach {
6829 index: staged_text,
6830 head: committed_text,
6831 }
6832 };
6833 anyhow::Ok(diff_bases_change)
6834 }
6835 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6836 use proto::open_uncommitted_diff_response::Mode;
6837
6838 let response = client
6839 .request(proto::OpenUncommittedDiff {
6840 project_id: project_id.to_proto(),
6841 buffer_id: buffer_id.to_proto(),
6842 })
6843 .await?;
6844 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
6845 let bases = match mode {
6846 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
6847 Mode::IndexAndHead => DiffBasesChange::SetEach {
6848 head: response.committed_text,
6849 index: response.staged_text,
6850 },
6851 };
6852 Ok(bases)
6853 }
6854 }
6855 });
6856
6857 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6858 }
6859
6860 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6861 let repository_id = self.snapshot.id;
6862 let rx = self.send_job(None, move |state, _| async move {
6863 match state {
6864 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6865 backend.load_blob_content(oid).await
6866 }
6867 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6868 let response = client
6869 .request(proto::GetBlobContent {
6870 project_id: project_id.to_proto(),
6871 repository_id: repository_id.0,
6872 oid: oid.to_string(),
6873 })
6874 .await?;
6875 Ok(response.content)
6876 }
6877 }
6878 });
6879 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6880 }
6881
    /// Handles filesystem change notifications for `paths`: recomputes git
    /// statuses and diff stats for just those paths, patches the snapshot's
    /// status tree, and forwards the updated snapshot downstream.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        // Keyed so rapid-fire change notifications collapse into one refresh.
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take every accumulated batch of changed paths at once.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                // Without a HEAD commit there is nothing to diff against.
                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten the batches into a sorted, de-duplicated set.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Insert entries whose status or diff stat differs from
                        // the previous snapshot; both the reported statuses and
                        // the cursor walk paths in order, so one forward pass
                        // suffices.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                // Unchanged relative to the previous snapshot.
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Changed paths git no longer reports a status for, but
                        // that appear in the previous snapshot, must be removed.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6994
6995 /// currently running git command and when it started
6996 pub fn current_job(&self) -> Option<JobInfo> {
6997 self.active_jobs.values().next().cloned()
6998 }
6999
7000 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
7001 self.send_job(None, |_, _| async {})
7002 }
7003
    /// Runs `f` while recording a `Running` pending op (tagged `git_status`)
    /// for each of `paths`, then stamps those ops as finished, skipped, or
    /// errored according to the job's outcome.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A `Canceled` error means the job was dropped before it ran to
            // completion; record it as skipped rather than failed.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Stamp the final status onto each op created above.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7043
7044 fn new_pending_ops_for_paths(
7045 &mut self,
7046 paths: Vec<RepoPath>,
7047 git_status: pending_op::GitStatus,
7048 ) -> Vec<(PendingOpId, RepoPath)> {
7049 let mut edits = Vec::with_capacity(paths.len());
7050 let mut ids = Vec::with_capacity(paths.len());
7051 for path in paths {
7052 let mut ops = self
7053 .pending_ops
7054 .get(&PathKey(path.as_ref().clone()), ())
7055 .cloned()
7056 .unwrap_or_else(|| PendingOps::new(&path));
7057 let id = ops.max_id() + 1;
7058 ops.ops.push(PendingOp {
7059 id,
7060 git_status,
7061 job_status: pending_op::JobStatus::Running,
7062 });
7063 edits.push(sum_tree::Edit::Insert(ops));
7064 ids.push((id, path));
7065 }
7066 self.pending_ops.edit(edits, ());
7067 ids
7068 }
7069 pub fn default_remote_url(&self) -> Option<String> {
7070 self.remote_upstream_url
7071 .clone()
7072 .or(self.remote_origin_url.clone())
7073 }
7074}
7075
7076/// If `path` is a git linked worktree checkout, resolves it to the main
7077/// repository's working directory path. Returns `None` if `path` is a normal
7078/// repository, not a git repo, or if resolution fails.
7079///
7080/// Resolution works by:
7081/// 1. Reading the `.git` file to get the `gitdir:` pointer
7082/// 2. Following that to the worktree-specific git directory
7083/// 3. Reading the `commondir` file to find the shared `.git` directory
7084/// 4. Deriving the main repo's working directory from the common dir
7085pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7086 let dot_git = path.join(".git");
7087 let metadata = fs.metadata(&dot_git).await.ok()??;
7088 if metadata.is_dir {
7089 return None; // Normal repo, not a linked worktree
7090 }
7091 // It's a .git file — parse the gitdir: pointer
7092 let content = fs.load(&dot_git).await.ok()?;
7093 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7094 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7095 // Read commondir to find the main .git directory
7096 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7097 let common_dir = fs
7098 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7099 .await
7100 .ok()?;
7101 Some(git::repository::original_repo_path_from_common_dir(
7102 &common_dir,
7103 ))
7104}
7105
7106/// Validates that the resolved worktree directory is acceptable:
7107/// - The setting must not be an absolute path.
7108/// - The resolved path must be either a subdirectory of the working
7109/// directory or a subdirectory of its parent (i.e., a sibling).
7110///
7111/// Returns `Ok(resolved_path)` or an error with a user-facing message.
7112pub fn worktrees_directory_for_repo(
7113 original_repo_abs_path: &Path,
7114 worktree_directory_setting: &str,
7115) -> Result<PathBuf> {
7116 // Check the original setting before trimming, since a path like "///"
7117 // is absolute but becomes "" after stripping trailing separators.
7118 // Also check for leading `/` or `\` explicitly, because on Windows
7119 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
7120 // would slip through even though it's clearly not a relative path.
7121 if Path::new(worktree_directory_setting).is_absolute()
7122 || worktree_directory_setting.starts_with('/')
7123 || worktree_directory_setting.starts_with('\\')
7124 {
7125 anyhow::bail!(
7126 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
7127 );
7128 }
7129
7130 if worktree_directory_setting.is_empty() {
7131 anyhow::bail!("git.worktree_directory must not be empty");
7132 }
7133
7134 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
7135 if trimmed == ".." {
7136 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
7137 }
7138
7139 let joined = original_repo_abs_path.join(trimmed);
7140 let resolved = util::normalize_path(&joined);
7141 let resolved = if resolved.starts_with(original_repo_abs_path) {
7142 resolved
7143 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
7144 resolved.join(repo_dir_name)
7145 } else {
7146 resolved
7147 };
7148
7149 let parent = original_repo_abs_path
7150 .parent()
7151 .unwrap_or(original_repo_abs_path);
7152
7153 if !resolved.starts_with(parent) {
7154 anyhow::bail!(
7155 "git.worktree_directory resolved to {resolved:?}, which is outside \
7156 the project root and its parent directory. It must resolve to a \
7157 subdirectory of {original_repo_abs_path:?} or a sibling of it."
7158 );
7159 }
7160
7161 Ok(resolved)
7162}
7163
7164/// Returns a short name for a linked worktree suitable for UI display
7165///
7166/// Uses the main worktree path to come up with a short name that disambiguates
7167/// the linked worktree from the main worktree.
7168pub fn linked_worktree_short_name(
7169 main_worktree_path: &Path,
7170 linked_worktree_path: &Path,
7171) -> Option<SharedString> {
7172 if main_worktree_path == linked_worktree_path {
7173 return None;
7174 }
7175
7176 let project_name = main_worktree_path.file_name()?.to_str()?;
7177 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7178 let name = if directory_name != project_name {
7179 directory_name.to_string()
7180 } else {
7181 linked_worktree_path
7182 .parent()?
7183 .file_name()?
7184 .to_str()?
7185 .to_string()
7186 };
7187 Some(name.into())
7188}
7189
/// Builds a hosting-provider permalink for a file inside the local Cargo
/// registry `src` cache, using the crate's `.cargo_vcs_info.json` (commit sha
/// and path within the upstream VCS) plus the `Cargo.toml`
/// `package.repository` field.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal shapes of the JSON/TOML metadata files we need to read.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file (skipping the file itself) to find the crate
    // root, identified by the `.cargo_vcs_info.json` cargo embeds in
    // published crates.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file's path at its location within the upstream repository.
    // `strip_prefix(dir)` cannot fail: `dir` is an ancestor of `path`.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7240
7241fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7242 let Some(blame) = blame else {
7243 return proto::BlameBufferResponse {
7244 blame_response: None,
7245 };
7246 };
7247
7248 let entries = blame
7249 .entries
7250 .into_iter()
7251 .map(|entry| proto::BlameEntry {
7252 sha: entry.sha.as_bytes().into(),
7253 start_line: entry.range.start,
7254 end_line: entry.range.end,
7255 original_line_number: entry.original_line_number,
7256 author: entry.author,
7257 author_mail: entry.author_mail,
7258 author_time: entry.author_time,
7259 author_tz: entry.author_tz,
7260 committer: entry.committer_name,
7261 committer_mail: entry.committer_email,
7262 committer_time: entry.committer_time,
7263 committer_tz: entry.committer_tz,
7264 summary: entry.summary,
7265 previous: entry.previous,
7266 filename: entry.filename,
7267 })
7268 .collect::<Vec<_>>();
7269
7270 let messages = blame
7271 .messages
7272 .into_iter()
7273 .map(|(oid, message)| proto::CommitMessage {
7274 oid: oid.as_bytes().into(),
7275 message,
7276 })
7277 .collect::<Vec<_>>();
7278
7279 proto::BlameBufferResponse {
7280 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7281 }
7282}
7283
7284fn deserialize_blame_buffer_response(
7285 response: proto::BlameBufferResponse,
7286) -> Option<git::blame::Blame> {
7287 let response = response.blame_response?;
7288 let entries = response
7289 .entries
7290 .into_iter()
7291 .filter_map(|entry| {
7292 Some(git::blame::BlameEntry {
7293 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7294 range: entry.start_line..entry.end_line,
7295 original_line_number: entry.original_line_number,
7296 committer_name: entry.committer,
7297 committer_time: entry.committer_time,
7298 committer_tz: entry.committer_tz,
7299 committer_email: entry.committer_mail,
7300 author: entry.author,
7301 author_mail: entry.author_mail,
7302 author_time: entry.author_time,
7303 author_tz: entry.author_tz,
7304 summary: entry.summary,
7305 previous: entry.previous,
7306 filename: entry.filename,
7307 })
7308 })
7309 .collect::<Vec<_>>();
7310
7311 let messages = response
7312 .messages
7313 .into_iter()
7314 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7315 .collect::<HashMap<_, _>>();
7316
7317 Some(Blame { entries, messages })
7318}
7319
7320fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7321 proto::Branch {
7322 is_head: branch.is_head,
7323 ref_name: branch.ref_name.to_string(),
7324 unix_timestamp: branch
7325 .most_recent_commit
7326 .as_ref()
7327 .map(|commit| commit.commit_timestamp as u64),
7328 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7329 ref_name: upstream.ref_name.to_string(),
7330 tracking: upstream
7331 .tracking
7332 .status()
7333 .map(|upstream| proto::UpstreamTracking {
7334 ahead: upstream.ahead as u64,
7335 behind: upstream.behind as u64,
7336 }),
7337 }),
7338 most_recent_commit: branch
7339 .most_recent_commit
7340 .as_ref()
7341 .map(|commit| proto::CommitSummary {
7342 sha: commit.sha.to_string(),
7343 subject: commit.subject.to_string(),
7344 commit_timestamp: commit.commit_timestamp,
7345 author_name: commit.author_name.to_string(),
7346 }),
7347 }
7348}
7349
7350fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7351 proto::Worktree {
7352 path: worktree.path.to_string_lossy().to_string(),
7353 ref_name: worktree
7354 .ref_name
7355 .as_ref()
7356 .map(|s| s.to_string())
7357 .unwrap_or_default(),
7358 sha: worktree.sha.to_string(),
7359 is_main: worktree.is_main,
7360 }
7361}
7362
7363fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7364 git::repository::Worktree {
7365 path: PathBuf::from(proto.path.clone()),
7366 ref_name: Some(SharedString::from(&proto.ref_name)),
7367 sha: proto.sha.clone().into(),
7368 is_main: proto.is_main,
7369 }
7370}
7371
7372fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7373 git::repository::Branch {
7374 is_head: proto.is_head,
7375 ref_name: proto.ref_name.clone().into(),
7376 upstream: proto
7377 .upstream
7378 .as_ref()
7379 .map(|upstream| git::repository::Upstream {
7380 ref_name: upstream.ref_name.to_string().into(),
7381 tracking: upstream
7382 .tracking
7383 .as_ref()
7384 .map(|tracking| {
7385 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7386 ahead: tracking.ahead as u32,
7387 behind: tracking.behind as u32,
7388 })
7389 })
7390 .unwrap_or(git::repository::UpstreamTracking::Gone),
7391 }),
7392 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7393 git::repository::CommitSummary {
7394 sha: commit.sha.to_string().into(),
7395 subject: commit.subject.to_string().into(),
7396 commit_timestamp: commit.commit_timestamp,
7397 author_name: commit.author_name.to_string().into(),
7398 has_parent: true,
7399 }
7400 }),
7401 }
7402}
7403
7404fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7405 proto::GitCommitDetails {
7406 sha: commit.sha.to_string(),
7407 message: commit.message.to_string(),
7408 commit_timestamp: commit.commit_timestamp,
7409 author_email: commit.author_email.to_string(),
7410 author_name: commit.author_name.to_string(),
7411 }
7412}
7413
7414fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7415 CommitDetails {
7416 sha: proto.sha.clone().into(),
7417 message: proto.message.clone().into(),
7418 commit_timestamp: proto.commit_timestamp,
7419 author_email: proto.author_email.clone().into(),
7420 author_name: proto.author_name.clone().into(),
7421 }
7422}
7423
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Capture the inputs we need from the entity up front; `prev_snapshot` is
    // the baseline that gets patched in two phases below.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        // A full snapshot supersedes any individually queued status refreshes.
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD's sha and load its commit details. A repo with no HEAD yet
    // (e.g. unborn branch) yields `None`; a failing `show` is logged and
    // treated as "no head commit" rather than failing the whole snapshot.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Phase 1 (background): fetch branches, head commit, and worktrees
    // concurrently; any hard failure aborts the snapshot.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    // The current branch is whichever entry git marked as HEAD, if any.
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // Keep only linked worktrees: the main working directory itself is
    // filtered out of the list.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    // Fetch both conventional remote URLs concurrently. Plain `join` (not
    // `try_join`) plus the unconditional `Ok` means the results are passed
    // through as-is — a missing remote does not fail the snapshot.
    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // Publish phase-1 results immediately so the UI can show branch/head/
    // worktree changes before the (slower) file-state phase finishes.
    let snapshot = this.update(cx, |this, cx| {
        // Compare against the currently published snapshot to decide which
        // change events to emit.
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        // File-state fields (statuses, merge, stash) are carried over from the
        // previous snapshot via struct update; they are refreshed in phase 2.
        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Phase 2 (background): file statuses, diff stats, and stash entries.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                // With no head commit there is nothing to diff against, so
                // skip the git call and substitute an empty stat set.
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    // "." scopes the status query to the whole repository.
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Index diff stats by path so each status entry can be annotated in O(1),
    // and collect conflicted paths for the merge-details update below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Refresh merge state (background): the update reports whether the set of
    // conflicts changed, which feeds into the StatusesChanged event below.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Publish phase-2 results. Note the scan_id is bumped a second time here,
    // so observers see a distinct revision for each of the two publishes.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7593
7594fn status_from_proto(
7595 simple_status: i32,
7596 status: Option<proto::GitFileStatus>,
7597) -> anyhow::Result<FileStatus> {
7598 use proto::git_file_status::Variant;
7599
7600 let Some(variant) = status.and_then(|status| status.variant) else {
7601 let code = proto::GitStatus::from_i32(simple_status)
7602 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7603 let result = match code {
7604 proto::GitStatus::Added => TrackedStatus {
7605 worktree_status: StatusCode::Added,
7606 index_status: StatusCode::Unmodified,
7607 }
7608 .into(),
7609 proto::GitStatus::Modified => TrackedStatus {
7610 worktree_status: StatusCode::Modified,
7611 index_status: StatusCode::Unmodified,
7612 }
7613 .into(),
7614 proto::GitStatus::Conflict => UnmergedStatus {
7615 first_head: UnmergedStatusCode::Updated,
7616 second_head: UnmergedStatusCode::Updated,
7617 }
7618 .into(),
7619 proto::GitStatus::Deleted => TrackedStatus {
7620 worktree_status: StatusCode::Deleted,
7621 index_status: StatusCode::Unmodified,
7622 }
7623 .into(),
7624 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7625 };
7626 return Ok(result);
7627 };
7628
7629 let result = match variant {
7630 Variant::Untracked(_) => FileStatus::Untracked,
7631 Variant::Ignored(_) => FileStatus::Ignored,
7632 Variant::Unmerged(unmerged) => {
7633 let [first_head, second_head] =
7634 [unmerged.first_head, unmerged.second_head].map(|head| {
7635 let code = proto::GitStatus::from_i32(head)
7636 .with_context(|| format!("Invalid git status code: {head}"))?;
7637 let result = match code {
7638 proto::GitStatus::Added => UnmergedStatusCode::Added,
7639 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7640 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7641 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7642 };
7643 Ok(result)
7644 });
7645 let [first_head, second_head] = [first_head?, second_head?];
7646 UnmergedStatus {
7647 first_head,
7648 second_head,
7649 }
7650 .into()
7651 }
7652 Variant::Tracked(tracked) => {
7653 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7654 .map(|status| {
7655 let code = proto::GitStatus::from_i32(status)
7656 .with_context(|| format!("Invalid git status code: {status}"))?;
7657 let result = match code {
7658 proto::GitStatus::Modified => StatusCode::Modified,
7659 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7660 proto::GitStatus::Added => StatusCode::Added,
7661 proto::GitStatus::Deleted => StatusCode::Deleted,
7662 proto::GitStatus::Renamed => StatusCode::Renamed,
7663 proto::GitStatus::Copied => StatusCode::Copied,
7664 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7665 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7666 };
7667 Ok(result)
7668 });
7669 let [index_status, worktree_status] = [index_status?, worktree_status?];
7670 TrackedStatus {
7671 index_status,
7672 worktree_status,
7673 }
7674 .into()
7675 }
7676 };
7677 Ok(result)
7678}
7679
7680fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7681 use proto::git_file_status::{Tracked, Unmerged, Variant};
7682
7683 let variant = match status {
7684 FileStatus::Untracked => Variant::Untracked(Default::default()),
7685 FileStatus::Ignored => Variant::Ignored(Default::default()),
7686 FileStatus::Unmerged(UnmergedStatus {
7687 first_head,
7688 second_head,
7689 }) => Variant::Unmerged(Unmerged {
7690 first_head: unmerged_status_to_proto(first_head),
7691 second_head: unmerged_status_to_proto(second_head),
7692 }),
7693 FileStatus::Tracked(TrackedStatus {
7694 index_status,
7695 worktree_status,
7696 }) => Variant::Tracked(Tracked {
7697 index_status: tracked_status_to_proto(index_status),
7698 worktree_status: tracked_status_to_proto(worktree_status),
7699 }),
7700 };
7701 proto::GitFileStatus {
7702 variant: Some(variant),
7703 }
7704}
7705
7706fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7707 match code {
7708 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7709 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7710 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7711 }
7712}
7713
7714fn tracked_status_to_proto(code: StatusCode) -> i32 {
7715 match code {
7716 StatusCode::Added => proto::GitStatus::Added as _,
7717 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7718 StatusCode::Modified => proto::GitStatus::Modified as _,
7719 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7720 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7721 StatusCode::Copied => proto::GitStatus::Copied as _,
7722 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7723 }
7724}