1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for all git state in a project: the set of repositories,
/// per-buffer diff state, and the plumbing for sharing that state with
/// collaborators over RPC.
pub struct GitStore {
    /// Local vs. remote mode, plus any downstream (shared) client handle.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All repositories currently known to the store, keyed by id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Worktree ids associated with each repository — presumably the worktrees
    // that contribute paths to it; maintained elsewhere in this file.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository most recently selected via `set_active_repo_for_path`.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff loads, stored as `Shared` tasks so concurrent callers
    /// await a single load instead of racing.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diffs that have been shared with remote peers, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Strong handles to the diffs for one buffer that have been shared with a
/// remote peer, keeping them alive for the duration of the share.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: the diffs, conflict tracking, and cached base texts
/// associated with a single open buffer.
struct BufferGitState {
    /// Diff of the buffer against the git index (weak: dropped when unused).
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    /// Diff of the buffer against HEAD (weak: dropped when unused).
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs against arbitrary commits; a `None` key means the diff was opened
    /// with no base commit (see `open_diff_since`, which loads no content then).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    /// Merge-conflict regions parsed out of the buffer text.
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders to notify when the current conflict reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // NOTE(review): appears to broadcast whether a diff recalculation is in
    // flight (see `wait_for_recalculation` callers) — confirm.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    /// Cached HEAD text of the file, if any.
    head_text: Option<Arc<str>>,
    /// Cached index text of the file, if any.
    index_text: Option<Arc<str>>,
    /// Cached file contents at specific commits.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed and their new
/// contents. `None` presumably means the file is absent from that base —
/// confirm against the loaders.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index text changed.
    SetIndex(Option<String>),
    /// Only the HEAD text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to (possibly) different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}

/// Identifies which kind of diff is being loaded for a buffer; used as part of
/// the `loading_diffs` key.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer vs. index.
    Unstaged,
    /// Buffer vs. HEAD.
    Uncommitted,
    /// Buffer vs. an arbitrary commit (`None` = no base commit).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store backs a local project or mirrors a remote one.
enum GitStoreState {
    /// Repositories live on this machine and are opened from the filesystem.
    Local {
        /// Source of fresh `RepositoryId`s (starts at 1, see `GitStore::local`).
        next_repository_id: Arc<AtomicU64>,
        /// Present while the project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    /// The project is hosted elsewhere; git operations go through
    /// `upstream_client`.
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this client re-shares the project downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// An update queued for forwarding to the downstream (sharing) client.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// State kept by a local store while its project is shared: the client to send
/// updates to and the background task that serializes them (see
/// `GitStore::shared`).
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    /// Queue feeding `_task`, which diffs snapshots and sends proto updates.
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}

/// A checkpoint of every repository in the store, keyed by each repository's
/// working-directory absolute path (see `GitStore::checkpoint`).
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// The git status of a single path within a repository snapshot.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    /// Repository-relative path this status applies to.
    pub repo_path: RepoPath,
    pub status: FileStatus,
    /// Added/deleted line counts, when known.
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
// Allow `StatusEntry` values to be stored in a `SumTree` ordered by repo path,
// with each subtree summarizing the aggregate git status beneath it.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed (and therefore deduplicated) by repo-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Identifier for a repository within a `GitStore`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// Details of an in-progress merge, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    // Presumably the merge heads involved in each conflicted path — confirm
    // against where this map is populated.
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    /// The prepared merge commit message, when present.
    pub message: Option<SharedString>,
}

/// Cache state for one commit's graph data: still loading, or loaded.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// An immutable view of a repository's state at a point in time; cloneable and
/// serializable to remote peers (see `initial_update`/`build_update` callers).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Git statuses keyed by repo-relative path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    /// The currently checked-out branch, when known.
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    // NOTE(review): presumably bumped on each status scan — confirm where it
    // is incremented.
    pub scan_id: u64,
    /// Details of an in-progress merge, if any.
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Linked git worktrees of this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
298
/// Identifier for a queued/running git job on a repository.
type JobId = u64;

/// Metadata about a running git job, e.g. for progress display.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Handle to the background task that loads per-commit graph data on demand.
struct GraphCommitDataHandler {
    _task: Task<()>,
    /// Channel used to request data for a specific commit.
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph-commit-data handler task.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
317
/// Initial commit-graph data for one `(LogSource, LogOrder)` query, along with
/// the task that is (or was) fetching it.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    /// Set when fetching failed.
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index into `commit_data` for each commit oid.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of graph data handed back to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// A single git repository tracked by the `GitStore`.
pub struct Repository {
    this: WeakEntity<Self>,
    /// Latest known state of the repository; also exposed via `Deref`.
    snapshot: RepositorySnapshot,
    /// Buffer backing the commit-message editor, once opened.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue of serialized git jobs for this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Jobs currently running, by id, for progress reporting.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    /// Source of the next `JobId`.
    job_id: JobId,
    /// Askpass delegates for in-flight remote operations, by askpass id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Shared task resolving to the backend (local or remote) for this repo.
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    /// Cached initial commit-graph data per `(source, order)` query.
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    /// Per-commit graph data cache.
    commit_data: HashMap<Oid, CommitDataState>,
}
350
// Expose the snapshot's fields and methods directly on `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}

/// Backend state for a repository that lives on the local machine.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    /// Environment variables captured from the work directory's shell.
    pub environment: Arc<HashMap<String, String>>,
}
365
impl LocalRepositoryState {
    /// Opens the git repository at `dot_git_abs_path`: captures the working
    /// directory's shell environment, locates a system `git` binary, then asks
    /// `fs` to open the repo. `is_trusted` is forwarded to the backend.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        // Best-effort: if the environment can't be captured, log and proceed
        // with an empty environment rather than failing to open the repo.
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found on the captured PATH; fall back to
                    // this process's own PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
408
/// Connection info for a repository hosted by a remote collaborator.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// Where a repository's git operations are actually executed.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events for commit-graph loading.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    // Presumably carries the new number of loaded commits — confirm at the
    // emit site.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
427
/// Events emitted by an individual `Repository`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// A commit-graph loading event for the given `(source, order)` query.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Marker event: a repository's set of active jobs changed.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the `GitStore` itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// Writing to the git index failed; carries the underlying error.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
452
// Event wiring: repositories emit repository events and job updates; the
// store emits store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of work to run against a repository's backend state.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): when set, presumably used to coalesce/serialize jobs of
    // the same kind — the queue logic is not visible in this chunk.
    key: Option<GitJobKey>,
}

/// Categories of git jobs, used as `GitJob::key`.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
    /// Creates a `GitStore` for a local project, where repositories are
    /// opened directly from the filesystem via `fs`.
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                // Repository ids start at 1.
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }
490
    /// Creates a `GitStore` that mirrors a remote project; git operations are
    /// forwarded to `upstream_client`.
    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: u64,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                downstream: None,
            },
            cx,
        )
    }
509
    /// Shared constructor: wires up subscriptions to the worktree and buffer
    /// stores (and, when present, the trusted-worktrees global).
    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        // These subscriptions must stay alive as long as the store does.
        let mut _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        // The trusted-worktrees global may be absent; subscribe only when it
        // exists.
        if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
            _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
        }

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            worktree_ids: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }
538
    /// Registers handlers for every git-related RPC message on `client`.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
590
    /// True when this store opens repositories directly from the local
    /// filesystem (as opposed to mirroring a remote project).
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
594 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
595 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
596 let id = repo.read(cx).id;
597 if self.active_repo_id != Some(id) {
598 self.active_repo_id = Some(id);
599 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
600 }
601 }
602 }
603
    /// Starts sharing this store's repositories with a downstream client.
    /// Sends a full initial snapshot of every repository; for local stores,
    /// also spawns a background task that diffs successive snapshots and
    /// streams incremental updates.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Re-sharing a remote project: forward full initial updates.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // `snapshots` tracks the last snapshot sent per repository so
                // the task below can send minimal diffs.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the queue with the current state of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send only the
                                            // diff against the last-sent state.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send a full update.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The queue closed or a send failed: drop the
                        // downstream handle so we stop sharing.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
684
685 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
686 match &mut self.state {
687 GitStoreState::Local {
688 downstream: downstream_client,
689 ..
690 } => {
691 downstream_client.take();
692 }
693 GitStoreState::Remote {
694 downstream: downstream_client,
695 ..
696 } => {
697 downstream_client.take();
698 }
699 }
700 self.shared_diffs.clear();
701 }
702
    /// Drops all diff state that was shared with `peer_id`.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
706
707 pub fn active_repository(&self) -> Option<Entity<Repository>> {
708 self.active_repo_id
709 .as_ref()
710 .map(|id| self.repositories[id].clone())
711 }
712
    /// Returns (opening if necessary) the diff between `buffer` and the git
    /// index. Concurrent calls for the same buffer share a single load task.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: a live diff already exists. If a recalculation is in
        // flight, wait for it before handing the diff back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads through a shared task in `loading_diffs`.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
767
    /// Returns (opening if necessary) the diff between `buffer` and its
    /// contents at commit `oid`. With `oid == None`, no base content is
    /// loaded. The resulting diff uses the unstaged diff as its secondary.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff for this oid already exists; wait for any
        // in-flight recalculation before returning it.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Join an in-flight load for the same (buffer, oid), if any.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the file's content at `oid`; `None` means no base.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Chain the unstaged diff as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Record the new diff (and cached base text) in the
                    // buffer's git state, and stop tracking the load.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
868
    /// Returns (opening if necessary) the diff between `buffer` and HEAD.
    /// Concurrent calls for the same buffer share a single load task.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff already exists; wait for any in-flight
        // recalculation before returning it.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads through a shared task in `loading_diffs`.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
921
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`: records
    /// the loaded base text(s) in the buffer's git state, builds the
    /// `BufferDiff`, and waits for the first recalculation to complete.
    ///
    /// Not used for `DiffKind::SinceOid` (see `open_diff_since`).
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // On a failed load, drop the `loading_diffs` entry so a later call can
        // retry, then propagate the error.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always has the unstaged diff as
                        // its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Trigger the initial recalculation and wait for it before
                // resolving, so callers see a populated diff.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
996
997 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
998 let diff_state = self.diffs.get(&buffer_id)?;
999 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1000 }
1001
1002 pub fn get_uncommitted_diff(
1003 &self,
1004 buffer_id: BufferId,
1005 cx: &App,
1006 ) -> Option<Entity<BufferDiff>> {
1007 let diff_state = self.diffs.get(&buffer_id)?;
1008 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1009 }
1010
1011 pub fn get_diff_since_oid(
1012 &self,
1013 buffer_id: BufferId,
1014 oid: Option<git::Oid>,
1015 cx: &App,
1016 ) -> Option<Entity<BufferDiff>> {
1017 let diff_state = self.diffs.get(&buffer_id)?;
1018 diff_state.read(cx).oid_diff(oid)
1019 }
1020
1021 pub fn open_conflict_set(
1022 &mut self,
1023 buffer: Entity<Buffer>,
1024 cx: &mut Context<Self>,
1025 ) -> Entity<ConflictSet> {
1026 log::debug!("open conflict set");
1027 let buffer_id = buffer.read(cx).remote_id();
1028
1029 if let Some(git_state) = self.diffs.get(&buffer_id)
1030 && let Some(conflict_set) = git_state
1031 .read(cx)
1032 .conflict_set
1033 .as_ref()
1034 .and_then(|weak| weak.upgrade())
1035 {
1036 let conflict_set = conflict_set;
1037 let buffer_snapshot = buffer.read(cx).text_snapshot();
1038
1039 git_state.update(cx, |state, cx| {
1040 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1041 });
1042
1043 return conflict_set;
1044 }
1045
1046 let is_unmerged = self
1047 .repository_and_path_for_buffer_id(buffer_id, cx)
1048 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1049 let git_store = cx.weak_entity();
1050 let buffer_git_state = self
1051 .diffs
1052 .entry(buffer_id)
1053 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1054 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1055
1056 self._subscriptions
1057 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1058 cx.emit(GitStoreEvent::ConflictsUpdated);
1059 }));
1060
1061 buffer_git_state.update(cx, |state, cx| {
1062 state.conflict_set = Some(conflict_set.downgrade());
1063 let buffer_snapshot = buffer.read(cx).text_snapshot();
1064 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1065 });
1066
1067 conflict_set
1068 }
1069
1070 pub fn project_path_git_status(
1071 &self,
1072 project_path: &ProjectPath,
1073 cx: &App,
1074 ) -> Option<FileStatus> {
1075 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1076 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1077 }
1078
    /// Creates a checkpoint of every repository in the store.
    ///
    /// Returns a [`GitStoreCheckpoint`] mapping each repository's working
    /// directory to its individual checkpoint. Fails if any single
    /// repository's checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` resolves to a nested Result; the `?` inside
                // the `map` closure flattens it into a single error path.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            // Short-circuits on the first failed repository checkpoint.
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Paths and checkpoints were pushed in lockstep above, so
                // zipping them re-associates each checkpoint with its repo.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1099
    /// Restores repositories to the state captured in `checkpoint`.
    ///
    /// Checkpoints whose working directory no longer corresponds to a known
    /// repository are silently skipped. Fails if any individual restore fails.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index current repositories by working directory so each saved
        // checkpoint can be matched back to its repository.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // `restore` resolves to a nested Result; `?` flattens it.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1125
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Returns false immediately if `right` is missing a repository that is
    /// present in `left`. Entries present only in `right` are never compared,
    /// since iteration is driven by `left`'s entries.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        // Index current repositories by working directory so checkpoint pairs
        // can be routed to the repository that produced them.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                // If the repository itself has disappeared, the pair is
                // skipped rather than treated as a mismatch.
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    // `compare` resolves to a nested Result; `?` flattens it.
                    tasks.push(async move { compare.await? });
                }
            } else {
                // `left` has a repo checkpoint that `right` lacks: not equal.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            // Equal only if every per-repository comparison reported equal.
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1164
    /// Blames a buffer.
    ///
    /// If `version` is given, the buffer's content at that version is blamed;
    /// otherwise the current content is used. Local repositories run blame
    /// directly against the backend; remote repositories forward the request
    /// upstream (which may report no blame, hence the `Option`).
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Capture everything needed from the buffer before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle so the spawned task doesn't keep the
        // repository entity alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // The host blames the buffer at the requested version on
                    // our behalf.
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1211
1212 pub fn file_history(
1213 &self,
1214 repo: &Entity<Repository>,
1215 path: RepoPath,
1216 cx: &mut App,
1217 ) -> Task<Result<git::repository::FileHistory>> {
1218 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1219
1220 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1221 }
1222
1223 pub fn file_history_paginated(
1224 &self,
1225 repo: &Entity<Repository>,
1226 path: RepoPath,
1227 skip: usize,
1228 limit: Option<usize>,
1229 cx: &mut App,
1230 ) -> Task<Result<git::repository::FileHistory>> {
1231 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1232
1233 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1234 }
1235
    /// Builds a web permalink to the selected line range of a buffer, using
    /// the buffer's git remote (or, as a fallback, Cargo registry metadata for
    /// Rust sources outside any repository).
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        let branch = repo.read(cx).branch.clone();
        // Use the current branch's upstream remote, defaulting to "origin".
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        // Run on the repository's job queue so it serializes with other git
        // operations.
        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        // Pin the permalink to the current HEAD commit.
                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Ask the host to build the permalink for us.
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1320
1321 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1322 match &self.state {
1323 GitStoreState::Local {
1324 downstream: downstream_client,
1325 ..
1326 } => downstream_client
1327 .as_ref()
1328 .map(|state| (state.client.clone(), state.project_id)),
1329 GitStoreState::Remote {
1330 downstream: downstream_client,
1331 ..
1332 } => downstream_client.clone(),
1333 }
1334 }
1335
1336 fn upstream_client(&self) -> Option<AnyProtoClient> {
1337 match &self.state {
1338 GitStoreState::Local { .. } => None,
1339 GitStoreState::Remote {
1340 upstream_client, ..
1341 } => Some(upstream_client.clone()),
1342 }
1343 }
1344
    /// Reacts to worktree-store events on a local project: propagates path
    /// changes to the affected repositories, (un)registers repositories when a
    /// worktree's git layout changes, and cleans up when a worktree is
    /// removed. Remote stores ignore these events entirely.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        // Only local stores scan repositories themselves.
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by the repository that owns them,
                    // then notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible (e.g. single-file) worktrees don't get repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Drop this worktree from every repository's worktree set and
                // collect repositories left with no worktree at all.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream collaborators the repository is gone.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to a repository update: refreshes the conflict flag (and, when
    /// it changed, the conflict markers) of every open buffer belonging to
    /// that repository, then re-emits the event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only buffers whose file lives in the updated repository matter.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // `conflict_set` is a weak handle; the `?` bails out
                        // (swallowed by `.ok()` below) if it was dropped.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Reparse markers only when the flag actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1490
    /// Forwards a repository's `JobsUpdated` notification as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1494
    /// Updates our list of repositories and schedules git scans in response to
    /// a notification from a worktree.
    ///
    /// Existing repositories matching an update (by old or new working
    /// directory) are either re-pointed and rescanned, or detached from this
    /// worktree (and removed entirely once no worktree references them).
    /// Updates with a complete set of new paths and no matching repository
    /// create a new local repository.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match an existing repository by either its old or new working
            // directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still exists: associate it with this worktree,
                    // adopt the (possibly moved) working directory, and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Repository disappeared from this worktree; drop it fully
                    // once no other worktree still references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // Brand-new repository: resolve the main repo path (differs
                // from the working directory for linked git worktrees).
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                // Atomic RMW guarantees unique ids regardless of ordering.
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // First repository added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        // Finalize removals collected above, clearing the active repository
        // and notifying downstream as needed.
        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1607
    /// Propagates worktree trust changes to the git backends of affected
    /// local repositories. No-op for remote stores.
    fn on_trusted_worktrees_event(
        &mut self,
        _: Entity<TrustedWorktreesStore>,
        event: &TrustedWorktreesEvent,
        cx: &mut Context<Self>,
    ) {
        // Trust only matters for locally-backed repositories.
        if !matches!(self.state, GitStoreState::Local { .. }) {
            return;
        }

        let (is_trusted, event_paths) = match event {
            TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
            TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
        };

        for (repo_id, worktree_ids) in &self.worktree_ids {
            // A repository is affected if any of its worktrees is listed in
            // the event.
            if worktree_ids
                .iter()
                .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
            {
                if let Some(repo) = self.repositories.get(repo_id) {
                    // The backend may still be initializing; wait for it on a
                    // background task before flipping the trust flag.
                    let repository_state = repo.read(cx).repository_state.clone();
                    cx.background_spawn(async move {
                        if let Ok(RepositoryState::Local(state)) = repository_state.await {
                            state.backend.set_trusted(is_trusted);
                        }
                    })
                    .detach();
                }
            }
        }
    }
1640
    /// Keeps per-buffer git state in sync with buffer-store events: language
    /// changes, shared-buffer/buffer teardown, and file-path changes that may
    /// move a buffer into (or within) a git repository.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Track language changes so diffs re-highlight base text.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // One collaborator stopped viewing the buffer: forget the diff
                // we were sharing with them.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Buffer is gone: drop all of its local and shared diff state.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Reload the committed base text for the new path and feed
                    // it into the buffer's diff state; errors are only logged.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1713
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all of that work
    /// has completed. Buffers with no tracked diff state are skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` returns None when no
                    // recalculation is in flight; extend handles that.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1739
    /// Writes hunk staging/unstaging back to the git index when a buffer diff
    /// reports staged/unstaged hunks; on failure, clears the diff's pending
    /// hunks and emits an `IndexWriteError`.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the operation counter so stale index updates can be
                // detected and ignored.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        // `None` index text means removing the file's index entry.
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Ok(Err(..)) = the job ran but the index write failed;
                        // roll back the optimistic pending-hunk state.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1779
1780 fn local_worktree_git_repos_changed(
1781 &mut self,
1782 worktree: Entity<Worktree>,
1783 changed_repos: &UpdatedGitRepositoriesSet,
1784 cx: &mut Context<Self>,
1785 ) {
1786 log::debug!("local worktree repos changed");
1787 debug_assert!(worktree.read(cx).is_local());
1788
1789 for repository in self.repositories.values() {
1790 repository.update(cx, |repository, cx| {
1791 let repo_abs_path = &repository.work_directory_abs_path;
1792 if changed_repos.iter().any(|update| {
1793 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1794 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1795 }) {
1796 repository.reload_buffer_diff_bases(cx);
1797 }
1798 });
1799 }
1800 }
1801
    /// All repositories known to this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1805
1806 /// Returns the original (main) repository working directory for the given worktree.
1807 /// For normal checkouts this equals the worktree's own path; for linked
1808 /// worktrees it points back to the original repo.
1809 pub fn original_repo_path_for_worktree(
1810 &self,
1811 worktree_id: WorktreeId,
1812 cx: &App,
1813 ) -> Option<Arc<Path>> {
1814 self.active_repo_id
1815 .iter()
1816 .chain(self.worktree_ids.keys())
1817 .find(|repo_id| {
1818 self.worktree_ids
1819 .get(repo_id)
1820 .is_some_and(|ids| ids.contains(&worktree_id))
1821 })
1822 .and_then(|repo_id| self.repositories.get(repo_id))
1823 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1824 }
1825
1826 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1827 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1828 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1829 Some(status.status)
1830 }
1831
1832 pub fn repository_and_path_for_buffer_id(
1833 &self,
1834 buffer_id: BufferId,
1835 cx: &App,
1836 ) -> Option<(Entity<Repository>, RepoPath)> {
1837 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1838 let project_path = buffer.read(cx).project_path(cx)?;
1839 self.repository_and_path_for_project_path(&project_path, cx)
1840 }
1841
1842 pub fn repository_and_path_for_project_path(
1843 &self,
1844 path: &ProjectPath,
1845 cx: &App,
1846 ) -> Option<(Entity<Repository>, RepoPath)> {
1847 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1848 self.repositories
1849 .values()
1850 .filter_map(|repo| {
1851 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1852 Some((repo.clone(), repo_path))
1853 })
1854 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1855 }
1856
1857 pub fn git_init(
1858 &self,
1859 path: Arc<Path>,
1860 fallback_branch_name: String,
1861 cx: &App,
1862 ) -> Task<Result<()>> {
1863 match &self.state {
1864 GitStoreState::Local { fs, .. } => {
1865 let fs = fs.clone();
1866 cx.background_executor()
1867 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1868 }
1869 GitStoreState::Remote {
1870 upstream_client,
1871 upstream_project_id: project_id,
1872 ..
1873 } => {
1874 let client = upstream_client.clone();
1875 let project_id = *project_id;
1876 cx.background_executor().spawn(async move {
1877 client
1878 .request(proto::GitInit {
1879 project_id: project_id,
1880 abs_path: path.to_string_lossy().into_owned(),
1881 fallback_branch_name,
1882 })
1883 .await?;
1884 Ok(())
1885 })
1886 }
1887 }
1888 }
1889
1890 pub fn git_clone(
1891 &self,
1892 repo: String,
1893 path: impl Into<Arc<std::path::Path>>,
1894 cx: &App,
1895 ) -> Task<Result<()>> {
1896 let path = path.into();
1897 match &self.state {
1898 GitStoreState::Local { fs, .. } => {
1899 let fs = fs.clone();
1900 cx.background_executor()
1901 .spawn(async move { fs.git_clone(&repo, &path).await })
1902 }
1903 GitStoreState::Remote {
1904 upstream_client,
1905 upstream_project_id,
1906 ..
1907 } => {
1908 if upstream_client.is_via_collab() {
1909 return Task::ready(Err(anyhow!(
1910 "Git Clone isn't supported for project guests"
1911 )));
1912 }
1913 let request = upstream_client.request(proto::GitClone {
1914 project_id: *upstream_project_id,
1915 abs_path: path.to_string_lossy().into_owned(),
1916 remote_repo: repo,
1917 });
1918
1919 cx.background_spawn(async move {
1920 let result = request.await?;
1921
1922 match result.success {
1923 true => Ok(()),
1924 false => Err(anyhow!("Git Clone failed")),
1925 }
1926 })
1927 }
1928 }
1929 }
1930
    /// Handles an upstream `UpdateRepository` message: creates the remote
    /// repository entity on first sight, applies the update, makes the first
    /// repository active, and re-forwards the update to any downstream peers.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Created lazily inside the `or_insert_with` closure; the
            // subscription is captured here because `this._subscriptions`
            // can't be borrowed while `this.repositories` is.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // First repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward the update to downstream peers under our project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1986
1987 async fn handle_remove_repository(
1988 this: Entity<Self>,
1989 envelope: TypedEnvelope<proto::RemoveRepository>,
1990 mut cx: AsyncApp,
1991 ) -> Result<()> {
1992 this.update(&mut cx, |this, cx| {
1993 let mut update = envelope.payload;
1994 let id = RepositoryId::from_proto(update.id);
1995 this.repositories.remove(&id);
1996 if let Some((client, project_id)) = this.downstream_client() {
1997 update.project_id = project_id.to_proto();
1998 client.send(update).log_err();
1999 }
2000 if this.active_repo_id == Some(id) {
2001 this.active_repo_id = None;
2002 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
2003 }
2004 cx.emit(GitStoreEvent::RepositoryRemoved(id));
2005 });
2006 Ok(())
2007 }
2008
2009 async fn handle_git_init(
2010 this: Entity<Self>,
2011 envelope: TypedEnvelope<proto::GitInit>,
2012 cx: AsyncApp,
2013 ) -> Result<proto::Ack> {
2014 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2015 let name = envelope.payload.fallback_branch_name;
2016 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2017 .await?;
2018
2019 Ok(proto::Ack {})
2020 }
2021
2022 async fn handle_git_clone(
2023 this: Entity<Self>,
2024 envelope: TypedEnvelope<proto::GitClone>,
2025 cx: AsyncApp,
2026 ) -> Result<proto::GitCloneResponse> {
2027 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2028 let repo_name = envelope.payload.remote_repo;
2029 let result = cx
2030 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2031 .await;
2032
2033 Ok(proto::GitCloneResponse {
2034 success: result.is_ok(),
2035 })
2036 }
2037
    /// Handles a `Fetch` request from a peer, proxying askpass prompts back to
    /// the requester and returning git's stdout/stderr.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Credential prompts are forwarded to the requesting peer.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2067
    /// Handles a `Push` request from a peer, proxying askpass prompts back to
    /// the requester and returning git's stdout/stderr.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        // Credential prompts are forwarded to the requesting peer.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `options()` decodes the raw proto enum; `.as_ref().map(..)` keeps
        // the option absent when the request carried none.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2115
    /// Message handler: runs `git pull` (optionally with `--rebase`) for the
    /// requested repository, forwarding credential prompts via `askpass_id`.
    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `branch_name` is optional: `None` lets git pull the current branch.
        let branch_name = envelope.payload.branch_name.map(|name| name.into());
        let remote_name = envelope.payload.remote_name.into();
        let rebase = envelope.payload.rebase;

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }
2147
2148 async fn handle_stage(
2149 this: Entity<Self>,
2150 envelope: TypedEnvelope<proto::Stage>,
2151 mut cx: AsyncApp,
2152 ) -> Result<proto::Ack> {
2153 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2154 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2155
2156 let entries = envelope
2157 .payload
2158 .paths
2159 .into_iter()
2160 .map(|path| RepoPath::new(&path))
2161 .collect::<Result<Vec<_>>>()?;
2162
2163 repository_handle
2164 .update(&mut cx, |repository_handle, cx| {
2165 repository_handle.stage_entries(entries, cx)
2166 })
2167 .await?;
2168 Ok(proto::Ack {})
2169 }
2170
2171 async fn handle_unstage(
2172 this: Entity<Self>,
2173 envelope: TypedEnvelope<proto::Unstage>,
2174 mut cx: AsyncApp,
2175 ) -> Result<proto::Ack> {
2176 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2177 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2178
2179 let entries = envelope
2180 .payload
2181 .paths
2182 .into_iter()
2183 .map(|path| RepoPath::new(&path))
2184 .collect::<Result<Vec<_>>>()?;
2185
2186 repository_handle
2187 .update(&mut cx, |repository_handle, cx| {
2188 repository_handle.unstage_entries(entries, cx)
2189 })
2190 .await?;
2191
2192 Ok(proto::Ack {})
2193 }
2194
2195 async fn handle_stash(
2196 this: Entity<Self>,
2197 envelope: TypedEnvelope<proto::Stash>,
2198 mut cx: AsyncApp,
2199 ) -> Result<proto::Ack> {
2200 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2201 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2202
2203 let entries = envelope
2204 .payload
2205 .paths
2206 .into_iter()
2207 .map(|path| RepoPath::new(&path))
2208 .collect::<Result<Vec<_>>>()?;
2209
2210 repository_handle
2211 .update(&mut cx, |repository_handle, cx| {
2212 repository_handle.stash_entries(entries, cx)
2213 })
2214 .await?;
2215
2216 Ok(proto::Ack {})
2217 }
2218
    /// Message handler: pops a stash entry.
    ///
    /// `stash_index` of `None` pops the most recent entry (`stash@{0}`).
    async fn handle_stash_pop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashPop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_pop(stash_index, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2236
    /// Message handler: applies a stash entry without dropping it.
    ///
    /// `stash_index` of `None` applies the most recent entry (`stash@{0}`).
    async fn handle_stash_apply(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashApply>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_apply(stash_index, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2254
    /// Message handler: drops a stash entry.
    ///
    /// `stash_index` of `None` drops the most recent entry (`stash@{0}`).
    async fn handle_stash_drop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashDrop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_drop(stash_index, cx)
            })
            .await??;

        Ok(proto::Ack {})
    }
2272
    /// Message handler: overwrites the index (staged) content of a single path.
    ///
    /// A `text` of `None` removes the path from the index.
    /// NOTE(review): the `None` third argument presumably skips environment
    /// overrides — confirm against `spawn_set_index_text_job`'s signature.
    async fn handle_set_index_text(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SetIndexText>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let repo_path = RepoPath::from_proto(&envelope.payload.path)?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.spawn_set_index_text_job(
                    repo_path,
                    envelope.payload.text,
                    None,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2294
    /// Message handler: runs a git hook in the requested repository.
    ///
    /// Rejects the request if the hook identifier in the payload is unknown.
    async fn handle_run_hook(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RunGitHook>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.run_hook(hook, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2310
    /// Message handler: creates a commit in the requested repository.
    ///
    /// Author identity is only overridden when BOTH `name` and `email` are
    /// present (`name.zip(email)`); otherwise git's configured identity is
    /// used. Credential prompts (e.g. for commit signing) are forwarded via
    /// `askpass_id`.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                        allow_empty: options.allow_empty,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2350
2351 async fn handle_get_remotes(
2352 this: Entity<Self>,
2353 envelope: TypedEnvelope<proto::GetRemotes>,
2354 mut cx: AsyncApp,
2355 ) -> Result<proto::GetRemotesResponse> {
2356 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2357 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2358
2359 let branch_name = envelope.payload.branch_name;
2360 let is_push = envelope.payload.is_push;
2361
2362 let remotes = repository_handle
2363 .update(&mut cx, |repository_handle, _| {
2364 repository_handle.get_remotes(branch_name, is_push)
2365 })
2366 .await??;
2367
2368 Ok(proto::GetRemotesResponse {
2369 remotes: remotes
2370 .into_iter()
2371 .map(|remotes| proto::get_remotes_response::Remote {
2372 name: remotes.name.to_string(),
2373 })
2374 .collect::<Vec<_>>(),
2375 })
2376 }
2377
2378 async fn handle_get_worktrees(
2379 this: Entity<Self>,
2380 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2381 mut cx: AsyncApp,
2382 ) -> Result<proto::GitWorktreesResponse> {
2383 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2384 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2385
2386 let worktrees = repository_handle
2387 .update(&mut cx, |repository_handle, _| {
2388 repository_handle.worktrees()
2389 })
2390 .await??;
2391
2392 Ok(proto::GitWorktreesResponse {
2393 worktrees: worktrees
2394 .into_iter()
2395 .map(|worktree| worktree_to_proto(&worktree))
2396 .collect::<Vec<_>>(),
2397 })
2398 }
2399
    /// Message handler: creates a new linked git worktree named `name` at
    /// `directory`, checked out at `commit`.
    async fn handle_create_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let directory = PathBuf::from(envelope.payload.directory);
        let name = envelope.payload.name;
        let commit = envelope.payload.commit;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_worktree(name, directory, commit)
            })
            .await??;

        Ok(proto::Ack {})
    }
2419
    /// Message handler: removes the linked git worktree at `path`.
    ///
    /// `force` removes the worktree even if it has local modifications.
    async fn handle_remove_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRemoveWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let path = PathBuf::from(envelope.payload.path);
        let force = envelope.payload.force;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.remove_worktree(path, force)
            })
            .await??;

        Ok(proto::Ack {})
    }
2438
    /// Message handler: moves a linked git worktree from `old_path` to
    /// `new_path`.
    async fn handle_rename_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRenameWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let old_path = PathBuf::from(envelope.payload.old_path);
        let new_path = PathBuf::from(envelope.payload.new_path);

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.rename_worktree(old_path, new_path)
            })
            .await??;

        Ok(proto::Ack {})
    }
2457
2458 async fn handle_get_branches(
2459 this: Entity<Self>,
2460 envelope: TypedEnvelope<proto::GitGetBranches>,
2461 mut cx: AsyncApp,
2462 ) -> Result<proto::GitBranchesResponse> {
2463 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2464 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2465
2466 let branches = repository_handle
2467 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2468 .await??;
2469
2470 Ok(proto::GitBranchesResponse {
2471 branches: branches
2472 .into_iter()
2473 .map(|branch| branch_to_proto(&branch))
2474 .collect::<Vec<_>>(),
2475 })
2476 }
    /// Message handler: resolves the repository's default branch, if any.
    ///
    /// NOTE(review): the `false` argument presumably disables a network lookup
    /// of the remote HEAD — confirm against `default_branch`'s signature.
    async fn handle_get_default_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDefaultBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetDefaultBranchResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.default_branch(false)
            })
            .await??
            .map(Into::into);

        Ok(proto::GetDefaultBranchResponse { branch })
    }
    /// Message handler: creates a branch named `branch_name`.
    ///
    /// The `None` second argument means the branch starts at the current HEAD
    /// rather than an explicit base revision.
    async fn handle_create_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_branch(branch_name, None)
            })
            .await??;

        Ok(proto::Ack {})
    }
2511
    /// Message handler: checks out the branch named `branch_name`.
    async fn handle_change_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitChangeBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.change_branch(branch_name)
            })
            .await??;

        Ok(proto::Ack {})
    }
2529
    /// Message handler: renames branch `branch` to `new_name`.
    async fn handle_rename_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRenameBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch = envelope.payload.branch;
        let new_name = envelope.payload.new_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.rename_branch(branch, new_name)
            })
            .await??;

        Ok(proto::Ack {})
    }
2548
    /// Message handler: adds a remote named `remote_name` pointing at
    /// `remote_url`.
    async fn handle_create_remote(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateRemote>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let remote_name = envelope.payload.remote_name;
        let remote_url = envelope.payload.remote_url;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_remote(remote_name, remote_url)
            })
            .await??;

        Ok(proto::Ack {})
    }
2567
    /// Message handler: deletes branch `branch_name`.
    ///
    /// `is_remote` selects deletion of a remote-tracking branch instead of a
    /// local one.
    async fn handle_delete_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitDeleteBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let is_remote = envelope.payload.is_remote;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.delete_branch(is_remote, branch_name)
            })
            .await??;

        Ok(proto::Ack {})
    }
2586
    /// Message handler: removes the remote named `remote_name`.
    async fn handle_remove_remote(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRemoveRemote>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let remote_name = envelope.payload.remote_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.remove_remote(remote_name)
            })
            .await??;

        Ok(proto::Ack {})
    }
2604
2605 async fn handle_show(
2606 this: Entity<Self>,
2607 envelope: TypedEnvelope<proto::GitShow>,
2608 mut cx: AsyncApp,
2609 ) -> Result<proto::GitCommitDetails> {
2610 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2611 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2612
2613 let commit = repository_handle
2614 .update(&mut cx, |repository_handle, _| {
2615 repository_handle.show(envelope.payload.commit)
2616 })
2617 .await??;
2618 Ok(proto::GitCommitDetails {
2619 sha: commit.sha.into(),
2620 message: commit.message.into(),
2621 commit_timestamp: commit.commit_timestamp,
2622 author_email: commit.author_email.into(),
2623 author_name: commit.author_name.into(),
2624 })
2625 }
2626
    /// Message handler: snapshots the repository state as a checkpoint and
    /// returns the checkpoint commit's sha as raw bytes.
    async fn handle_create_checkpoint(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitCreateCheckpointResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let checkpoint = repository_handle
            .update(&mut cx, |repository, _| repository.checkpoint())
            .await??;

        Ok(proto::GitCreateCheckpointResponse {
            commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
        })
    }
2643
    /// Message handler: restores the repository to a previously created
    /// checkpoint identified by its commit sha.
    async fn handle_restore_checkpoint(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Rejects malformed shas before touching the repository.
        let checkpoint = GitRepositoryCheckpoint {
            commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
        };

        repository_handle
            .update(&mut cx, |repository, _| {
                repository.restore_checkpoint(checkpoint)
            })
            .await??;

        Ok(proto::Ack {})
    }
2664
    /// Message handler: reports whether two checkpoints contain identical
    /// repository state.
    async fn handle_compare_checkpoints(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitCompareCheckpointsResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let left = GitRepositoryCheckpoint {
            commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
        };
        let right = GitRepositoryCheckpoint {
            commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
        };

        let equal = repository_handle
            .update(&mut cx, |repository, _| {
                repository.compare_checkpoints(left, right)
            })
            .await??;

        Ok(proto::GitCompareCheckpointsResponse { equal })
    }
2688
    /// Message handler: returns the textual diff between two checkpoints.
    async fn handle_diff_checkpoints(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitDiffCheckpointsResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let base = GitRepositoryCheckpoint {
            commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
        };
        let target = GitRepositoryCheckpoint {
            commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
        };

        let diff = repository_handle
            .update(&mut cx, |repository, _| {
                repository.diff_checkpoints(base, target)
            })
            .await??;

        Ok(proto::GitDiffCheckpointsResponse { diff })
    }
2712
2713 async fn handle_load_commit_diff(
2714 this: Entity<Self>,
2715 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2716 mut cx: AsyncApp,
2717 ) -> Result<proto::LoadCommitDiffResponse> {
2718 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2719 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2720
2721 let commit_diff = repository_handle
2722 .update(&mut cx, |repository_handle, _| {
2723 repository_handle.load_commit_diff(envelope.payload.commit)
2724 })
2725 .await??;
2726 Ok(proto::LoadCommitDiffResponse {
2727 files: commit_diff
2728 .files
2729 .into_iter()
2730 .map(|file| proto::CommitFile {
2731 path: file.path.to_proto(),
2732 old_text: file.old_text,
2733 new_text: file.new_text,
2734 is_binary: file.is_binary,
2735 })
2736 .collect(),
2737 })
2738 }
2739
2740 async fn handle_file_history(
2741 this: Entity<Self>,
2742 envelope: TypedEnvelope<proto::GitFileHistory>,
2743 mut cx: AsyncApp,
2744 ) -> Result<proto::GitFileHistoryResponse> {
2745 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2746 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2747 let path = RepoPath::from_proto(&envelope.payload.path)?;
2748 let skip = envelope.payload.skip as usize;
2749 let limit = envelope.payload.limit.map(|l| l as usize);
2750
2751 let file_history = repository_handle
2752 .update(&mut cx, |repository_handle, _| {
2753 repository_handle.file_history_paginated(path, skip, limit)
2754 })
2755 .await??;
2756
2757 Ok(proto::GitFileHistoryResponse {
2758 entries: file_history
2759 .entries
2760 .into_iter()
2761 .map(|entry| proto::FileHistoryEntry {
2762 sha: entry.sha.to_string(),
2763 subject: entry.subject.to_string(),
2764 message: entry.message.to_string(),
2765 commit_timestamp: entry.commit_timestamp,
2766 author_name: entry.author_name.to_string(),
2767 author_email: entry.author_email.to_string(),
2768 })
2769 .collect(),
2770 path: file_history.path.to_proto(),
2771 })
2772 }
2773
    /// Message handler: performs `git reset` to the given commit.
    ///
    /// Only soft and mixed resets are representable in the protocol; hard
    /// reset is intentionally not exposed here.
    async fn handle_reset(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitReset>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let mode = match envelope.payload.mode() {
            git_reset::ResetMode::Soft => ResetMode::Soft,
            git_reset::ResetMode::Mixed => ResetMode::Mixed,
        };

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.reset(envelope.payload.commit, mode, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2794
    /// Message handler: restores the given paths to their state at `commit`
    /// (like `git checkout <commit> -- <paths>`).
    async fn handle_checkout_files(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCheckoutFiles>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        // Validate all paths before mutating anything.
        let paths = envelope
            .payload
            .paths
            .iter()
            .map(|s| RepoPath::from_proto(s))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
            })
            .await?;
        Ok(proto::Ack {})
    }
2816
    /// Message handler: opens (or reuses) the repository's commit-message
    /// buffer and shares it with the requesting peer, returning its buffer id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Replicate the buffer to the peer that actually initiated the request
        // (the original sender when the message was forwarded by the host).
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2847
    /// Message handler: answers a credential prompt on behalf of a remote git
    /// operation.
    ///
    /// The delegate is removed from the shared map while the prompt is
    /// outstanding (so no concurrent request can use it) and re-inserted after
    /// the user responds.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        // `None` here means the user dismissed the prompt.
        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2876
2877 async fn handle_check_for_pushed_commits(
2878 this: Entity<Self>,
2879 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2880 mut cx: AsyncApp,
2881 ) -> Result<proto::CheckForPushedCommitsResponse> {
2882 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2883 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2884
2885 let branches = repository_handle
2886 .update(&mut cx, |repository_handle, _| {
2887 repository_handle.check_for_pushed_commits()
2888 })
2889 .await??;
2890 Ok(proto::CheckForPushedCommitsResponse {
2891 pushed_to: branches
2892 .into_iter()
2893 .map(|commit| commit.to_string())
2894 .collect(),
2895 })
2896 }
2897
2898 async fn handle_git_diff(
2899 this: Entity<Self>,
2900 envelope: TypedEnvelope<proto::GitDiff>,
2901 mut cx: AsyncApp,
2902 ) -> Result<proto::GitDiffResponse> {
2903 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2904 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2905 let diff_type = match envelope.payload.diff_type() {
2906 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2907 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2908 proto::git_diff::DiffType::MergeBase => {
2909 let base_ref = envelope
2910 .payload
2911 .merge_base_ref
2912 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2913 DiffType::MergeBase {
2914 base_ref: base_ref.into(),
2915 }
2916 }
2917 };
2918
2919 let mut diff = repository_handle
2920 .update(&mut cx, |repository_handle, cx| {
2921 repository_handle.diff(diff_type, cx)
2922 })
2923 .await??;
2924 const ONE_MB: usize = 1_000_000;
2925 if diff.len() > ONE_MB {
2926 diff = diff.chars().take(ONE_MB).collect()
2927 }
2928
2929 Ok(proto::GitDiffResponse { diff })
2930 }
2931
2932 async fn handle_tree_diff(
2933 this: Entity<Self>,
2934 request: TypedEnvelope<proto::GetTreeDiff>,
2935 mut cx: AsyncApp,
2936 ) -> Result<proto::GetTreeDiffResponse> {
2937 let repository_id = RepositoryId(request.payload.repository_id);
2938 let diff_type = if request.payload.is_merge {
2939 DiffTreeType::MergeBase {
2940 base: request.payload.base.into(),
2941 head: request.payload.head.into(),
2942 }
2943 } else {
2944 DiffTreeType::Since {
2945 base: request.payload.base.into(),
2946 head: request.payload.head.into(),
2947 }
2948 };
2949
2950 let diff = this
2951 .update(&mut cx, |this, cx| {
2952 let repository = this.repositories().get(&repository_id)?;
2953 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2954 })
2955 .context("missing repository")?
2956 .await??;
2957
2958 Ok(proto::GetTreeDiffResponse {
2959 entries: diff
2960 .entries
2961 .into_iter()
2962 .map(|(path, status)| proto::TreeDiffStatus {
2963 path: path.as_ref().to_proto(),
2964 status: match status {
2965 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2966 TreeDiffStatus::Modified { .. } => {
2967 proto::tree_diff_status::Status::Modified.into()
2968 }
2969 TreeDiffStatus::Deleted { .. } => {
2970 proto::tree_diff_status::Status::Deleted.into()
2971 }
2972 },
2973 oid: match status {
2974 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2975 Some(old.to_string())
2976 }
2977 TreeDiffStatus::Added => None,
2978 },
2979 })
2980 .collect(),
2981 })
2982 }
2983
    /// Message handler: loads the content of a git blob by object id.
    async fn handle_get_blob_content(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetBlobContent>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetBlobContentResponse> {
        // Reject malformed object ids before looking anything up.
        let oid = git::Oid::from_str(&request.payload.oid)?;
        let repository_id = RepositoryId(request.payload.repository_id);
        let content = this
            .update(&mut cx, |this, cx| {
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
            })
            .context("missing repository")?
            .await?;
        Ok(proto::GetBlobContentResponse { content })
    }
3000
    /// Message handler: opens the unstaged diff (index vs. working copy) for a
    /// buffer, retains it in `shared_diffs` so it stays alive for the peer,
    /// and returns the staged base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Key the retained diff by the originating peer so it can be released
        // when that peer disconnects.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
3024
    /// Message handler: opens the uncommitted diff (HEAD vs. working copy) for
    /// a buffer, retains it in `shared_diffs`, and returns the committed and
    /// staged base texts.
    ///
    /// When the index text is identical to the HEAD text (same snapshot id),
    /// `IndexMatchesHead` is sent with `staged_text: None` so the peer can
    /// reuse the committed text instead of receiving it twice.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Key the retained diff by the originating peer so it can be released
        // when that peer disconnects.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff (if present) tracks the index; its base text
            // is the staged snapshot, available only if the file exists there.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                // File exists in HEAD.
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Same snapshot in index and HEAD: send it only once.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // File exists in HEAD but not in the index.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // File absent from HEAD (e.g. newly added); index may still
                // have a staged copy.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3085
3086 async fn handle_update_diff_bases(
3087 this: Entity<Self>,
3088 request: TypedEnvelope<proto::UpdateDiffBases>,
3089 mut cx: AsyncApp,
3090 ) -> Result<()> {
3091 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3092 this.update(&mut cx, |this, cx| {
3093 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
3094 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
3095 {
3096 let buffer = buffer.read(cx).text_snapshot();
3097 diff_state.update(cx, |diff_state, cx| {
3098 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
3099 })
3100 }
3101 });
3102 Ok(())
3103 }
3104
3105 async fn handle_blame_buffer(
3106 this: Entity<Self>,
3107 envelope: TypedEnvelope<proto::BlameBuffer>,
3108 mut cx: AsyncApp,
3109 ) -> Result<proto::BlameBufferResponse> {
3110 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3111 let version = deserialize_version(&envelope.payload.version);
3112 let buffer = this.read_with(&cx, |this, cx| {
3113 this.buffer_store.read(cx).get_existing(buffer_id)
3114 })?;
3115 buffer
3116 .update(&mut cx, |buffer, _| {
3117 buffer.wait_for_version(version.clone())
3118 })
3119 .await?;
3120 let blame = this
3121 .update(&mut cx, |this, cx| {
3122 this.blame_buffer(&buffer, Some(version), cx)
3123 })
3124 .await?;
3125 Ok(serialize_blame_buffer_response(blame))
3126 }
3127
3128 async fn handle_get_permalink_to_line(
3129 this: Entity<Self>,
3130 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3131 mut cx: AsyncApp,
3132 ) -> Result<proto::GetPermalinkToLineResponse> {
3133 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3134 // let version = deserialize_version(&envelope.payload.version);
3135 let selection = {
3136 let proto_selection = envelope
3137 .payload
3138 .selection
3139 .context("no selection to get permalink for defined")?;
3140 proto_selection.start as u32..proto_selection.end as u32
3141 };
3142 let buffer = this.read_with(&cx, |this, cx| {
3143 this.buffer_store.read(cx).get_existing(buffer_id)
3144 })?;
3145 let permalink = this
3146 .update(&mut cx, |this, cx| {
3147 this.get_permalink_to_line(&buffer, selection, cx)
3148 })
3149 .await?;
3150 Ok(proto::GetPermalinkToLineResponse {
3151 permalink: permalink.to_string(),
3152 })
3153 }
3154
3155 fn repository_for_request(
3156 this: &Entity<Self>,
3157 id: RepositoryId,
3158 cx: &mut AsyncApp,
3159 ) -> Result<Entity<Repository>> {
3160 this.read_with(cx, |this, _| {
3161 this.repositories
3162 .get(&id)
3163 .context("missing repository handle")
3164 .cloned()
3165 })
3166 }
3167
3168 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3169 self.repositories
3170 .iter()
3171 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3172 .collect()
3173 }
3174
    /// Groups a worktree's updated entries by the repository that contains
    /// them, returning repo-relative paths per repository.
    ///
    /// Paths are matched against repository working directories on background
    /// tasks; a path contained in nested repositories is attributed only to
    /// the innermost one.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Pair each repository with its working-directory path so matching can
        // happen off the main thread without touching entities.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorting lets each repo find its entries as one contiguous range.
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // Iterate repos in reverse (deepest paths first) so that, when
            // results are consumed below, inner repositories claim shared
            // paths before their enclosing repositories do.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3254}
3255
3256impl BufferGitState {
3257 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3258 Self {
3259 unstaged_diff: Default::default(),
3260 uncommitted_diff: Default::default(),
3261 oid_diffs: Default::default(),
3262 recalculate_diff_task: Default::default(),
3263 language: Default::default(),
3264 language_registry: Default::default(),
3265 recalculating_tx: postage::watch::channel_with(false).0,
3266 hunk_staging_operation_count: 0,
3267 hunk_staging_operation_count_as_of_write: 0,
3268 head_text: Default::default(),
3269 index_text: Default::default(),
3270 oid_texts: Default::default(),
3271 head_changed: Default::default(),
3272 index_changed: Default::default(),
3273 language_changed: Default::default(),
3274 conflict_updated_futures: Default::default(),
3275 conflict_set: Default::default(),
3276 reparse_conflict_markers_task: Default::default(),
3277 }
3278 }
3279
3280 #[ztracing::instrument(skip_all)]
3281 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3282 self.language = buffer.read(cx).language().cloned();
3283 self.language_changed = true;
3284 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3285 }
3286
    /// Re-scans the buffer for git conflict markers in the background and
    /// updates the associated `ConflictSet`.
    ///
    /// The returned receiver fires once the update has been applied. When
    /// there is no live conflict set, or the previous parse found no
    /// conflicts, the sender is dropped immediately, so the receiver resolves
    /// as canceled without any reparse being scheduled.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff on the background executor; only the final
                // snapshot swap touches the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Resolve every waiter that accumulated while this
                    // reparse was running.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3338
3339 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3340 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3341 }
3342
3343 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3344 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3345 }
3346
3347 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3348 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3349 }
3350
3351 fn handle_base_texts_updated(
3352 &mut self,
3353 buffer: text::BufferSnapshot,
3354 message: proto::UpdateDiffBases,
3355 cx: &mut Context<Self>,
3356 ) {
3357 use proto::update_diff_bases::Mode;
3358
3359 let Some(mode) = Mode::from_i32(message.mode) else {
3360 return;
3361 };
3362
3363 let diff_bases_change = match mode {
3364 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3365 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3366 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3367 Mode::IndexAndHead => DiffBasesChange::SetEach {
3368 index: message.staged_text,
3369 head: message.committed_text,
3370 },
3371 };
3372
3373 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3374 }
3375
3376 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3377 if *self.recalculating_tx.borrow() {
3378 let mut rx = self.recalculating_tx.subscribe();
3379 Some(async move {
3380 loop {
3381 let is_recalculating = rx.recv().await;
3382 if is_recalculating != Some(true) {
3383 break;
3384 }
3385 }
3386 })
3387 } else {
3388 None
3389 }
3390 }
3391
3392 fn diff_bases_changed(
3393 &mut self,
3394 buffer: text::BufferSnapshot,
3395 diff_bases_change: Option<DiffBasesChange>,
3396 cx: &mut Context<Self>,
3397 ) {
3398 match diff_bases_change {
3399 Some(DiffBasesChange::SetIndex(index)) => {
3400 self.index_text = index.map(|mut index| {
3401 text::LineEnding::normalize(&mut index);
3402 Arc::from(index.as_str())
3403 });
3404 self.index_changed = true;
3405 }
3406 Some(DiffBasesChange::SetHead(head)) => {
3407 self.head_text = head.map(|mut head| {
3408 text::LineEnding::normalize(&mut head);
3409 Arc::from(head.as_str())
3410 });
3411 self.head_changed = true;
3412 }
3413 Some(DiffBasesChange::SetBoth(text)) => {
3414 let text = text.map(|mut text| {
3415 text::LineEnding::normalize(&mut text);
3416 Arc::from(text.as_str())
3417 });
3418 self.head_text = text.clone();
3419 self.index_text = text;
3420 self.head_changed = true;
3421 self.index_changed = true;
3422 }
3423 Some(DiffBasesChange::SetEach { index, head }) => {
3424 self.index_text = index.map(|mut index| {
3425 text::LineEnding::normalize(&mut index);
3426 Arc::from(index.as_str())
3427 });
3428 self.index_changed = true;
3429 self.head_text = head.map(|mut head| {
3430 text::LineEnding::normalize(&mut head);
3431 Arc::from(head.as_str())
3432 });
3433 self.head_changed = true;
3434 }
3435 None => {}
3436 }
3437
3438 self.recalculate_diffs(buffer, cx)
3439 }
3440
    /// Recomputes the unstaged, uncommitted, and per-oid diffs for this buffer
    /// against the currently cached base texts, then applies the new snapshots
    /// to the corresponding `BufferDiff` entities.
    ///
    /// The work runs on a spawned task; `recalculating_tx` broadcasts `true`
    /// while a recalculation is in flight (see `wait_for_recalculation`). If
    /// hunk staging operations complete while the task runs, the attempt is
    /// abandoned and a later recalculation picks up the settled state.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        // Snapshot all inputs up front so the spawned task doesn't race with
        // later mutations of this state.
        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality: `diff_bases_changed` stores the same Arc in both
        // fields when index and HEAD are known to match.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop cached base texts for oid diffs that no longer have any live
        // observers.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When index == HEAD, the uncommitted diff is identical to the
                // unstaged one, so reuse it rather than diffing again.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Refresh any diffs held against specific commit oids.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and announce that recalculation is done.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
3634}
3635
/// Builds an askpass delegate that forwards credential prompts for a git
/// operation to the downstream remote client and feeds the encrypted response
/// back to the waiting operation.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // No downstream client means nobody to prompt; dropping `tx`
            // cancels the askpass request.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext response from memory once forwarded.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3665
3666impl RepositoryId {
3667 pub fn to_proto(self) -> u64 {
3668 self.0
3669 }
3670
3671 pub fn from_proto(id: u64) -> Self {
3672 RepositoryId(id)
3673 }
3674}
3675
3676impl RepositorySnapshot {
3677 fn empty(
3678 id: RepositoryId,
3679 work_directory_abs_path: Arc<Path>,
3680 original_repo_abs_path: Option<Arc<Path>>,
3681 path_style: PathStyle,
3682 ) -> Self {
3683 Self {
3684 id,
3685 statuses_by_path: Default::default(),
3686 original_repo_abs_path: original_repo_abs_path
3687 .unwrap_or_else(|| work_directory_abs_path.clone()),
3688 work_directory_abs_path,
3689 branch: None,
3690 head_commit: None,
3691 scan_id: 0,
3692 merge: Default::default(),
3693 remote_origin_url: None,
3694 remote_upstream_url: None,
3695 stash_entries: Default::default(),
3696 linked_worktrees: Arc::from([]),
3697 path_style,
3698 }
3699 }
3700
    /// Serializes the whole snapshot into one `UpdateRepository` message, used
    /// when a peer first needs this repository's complete state.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            // Nothing can have been removed relative to "no prior state".
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3742
    /// Serializes the difference between `old` and this snapshot into an
    /// `UpdateRepository` message containing only changed/removed statuses.
    ///
    /// Both status lists are iterated in path order, so a single merge-style
    /// walk classifies each entry as added, changed, or removed.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        // Path exists only in the new snapshot: added.
                        Ordering::Less => {
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        // Path exists in both: emit only if it changed.
                        Ordering::Equal => {
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        // Path exists only in the old snapshot: removed.
                        Ordering::Greater => {
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3823
    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    pub fn is_main_worktree(&self) -> bool {
        // A linked worktree's working directory differs from the path of the
        // original checkout it was created from.
        self.work_directory_abs_path == self.original_repo_abs_path
    }

    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }
3842
3843 pub fn linked_worktrees(&self) -> &[GitWorktree] {
3844 &self.linked_worktrees
3845 }
3846
3847 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3848 self.statuses_by_path.iter().cloned()
3849 }
3850
3851 pub fn status_summary(&self) -> GitSummary {
3852 self.statuses_by_path.summary().item_summary
3853 }
3854
3855 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3856 self.statuses_by_path
3857 .get(&PathKey(path.as_ref().clone()), ())
3858 .cloned()
3859 }
3860
3861 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
3862 self.statuses_by_path
3863 .get(&PathKey(path.as_ref().clone()), ())
3864 .and_then(|entry| entry.diff_stat)
3865 }
3866
    /// Converts an absolute filesystem path into a repository-relative path,
    /// or `None` if `abs_path` is not under this repository's working
    /// directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Converts a repository-relative path back into an absolute path.
    // NOTE(review): the `join` result is unwrapped — this assumes a repo path
    // can always be joined onto the working directory; confirm that invariant.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }

    /// Shared implementation: strips `work_directory_abs_path` off the front
    /// of `abs_path` using the repository's path style.
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }
3887
3888 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3889 self.merge
3890 .merge_heads_by_conflicted_path
3891 .contains_key(repo_path)
3892 }
3893
3894 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3895 let had_conflict_on_last_merge_head_change = self
3896 .merge
3897 .merge_heads_by_conflicted_path
3898 .contains_key(repo_path);
3899 let has_conflict_currently = self
3900 .status_for_path(repo_path)
3901 .is_some_and(|entry| entry.status.is_conflicted());
3902 had_conflict_on_last_merge_head_change || has_conflict_currently
3903 }
3904
3905 /// This is the name that will be displayed in the repository selector for this repository.
3906 pub fn display_name(&self) -> SharedString {
3907 self.work_directory_abs_path
3908 .file_name()
3909 .unwrap_or_default()
3910 .to_string_lossy()
3911 .to_string()
3912 .into()
3913 }
3914}
3915
3916pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3917 proto::StashEntry {
3918 oid: entry.oid.as_bytes().to_vec(),
3919 message: entry.message.clone(),
3920 branch: entry.branch.clone(),
3921 index: entry.index as u64,
3922 timestamp: entry.timestamp,
3923 }
3924}
3925
3926pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3927 Ok(StashEntry {
3928 oid: Oid::from_bytes(&entry.oid)?,
3929 message: entry.message.clone(),
3930 index: entry.index as usize,
3931 branch: entry.branch.clone(),
3932 timestamp: entry.timestamp,
3933 })
3934}
3935
impl MergeDetails {
    /// Refreshes merge state from the git backend: reloads the merge message,
    /// re-resolves the in-progress operation heads (merge / cherry-pick /
    /// rebase / revert / apply), and reconciles the recorded set of paths
    /// that were conflicted when those heads last changed.
    ///
    /// Returns `Ok(true)` when the recorded conflict set changed.
    async fn update(
        &mut self,
        backend: &Arc<dyn GitRepository>,
        current_conflicted_paths: Vec<RepoPath>,
    ) -> Result<bool> {
        log::debug!("load merge details");
        self.message = backend.merge_message().await.map(SharedString::from);
        // Each entry is `Some(sha)` when the corresponding ref exists, i.e.
        // that operation is in progress; failures degrade to an empty list.
        let heads = backend
            .revparse_batch(vec![
                "MERGE_HEAD".into(),
                "CHERRY_PICK_HEAD".into(),
                "REBASE_HEAD".into(),
                "REVERT_HEAD".into(),
                "APPLY_HEAD".into(),
            ])
            .await
            .log_err()
            .unwrap_or_default()
            .into_iter()
            .map(|opt| opt.map(SharedString::from))
            .collect::<Vec<_>>();

        let mut conflicts_changed = false;

        // Record the merge state for newly conflicted paths
        for path in &current_conflicted_paths {
            if self.merge_heads_by_conflicted_path.get(&path).is_none() {
                conflicts_changed = true;
                self.merge_heads_by_conflicted_path
                    .insert(path.clone(), heads.clone());
            }
        }

        // Clear state for paths that are no longer conflicted and for which the merge heads have changed
        self.merge_heads_by_conflicted_path
            .retain(|path, old_merge_heads| {
                let keep = current_conflicted_paths.contains(path)
                    || (old_merge_heads == &heads
                        && old_merge_heads.iter().any(|head| head.is_some()));
                if !keep {
                    conflicts_changed = true;
                }
                keep
            });

        Ok(conflicts_changed)
    }
}
3985
3986impl Repository {
3987 pub fn is_trusted(&self) -> bool {
3988 match self.repository_state.peek() {
3989 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3990 _ => false,
3991 }
3992 }
3993
    /// Returns a clone of the current repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }

    /// Iterates over all pending git operations tracked for this repository,
    /// in path order.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }

    /// Aggregated summary over all pending operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }

    /// Pending operations recorded for `path`, if any.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
4011
    /// Constructs the in-process representation of a local repository.
    ///
    /// The backend state is initialized asynchronously and shared between the
    /// git worker (spawned immediately, before initialization completes) and
    /// the `repository_state` future; initialization errors are stringified so
    /// the shared future stays cloneable.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Cached commit-graph data is keyed to the current branch; invalidate
        // it on branch changes after the initial scan.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4079
    /// Constructs the representation of a repository hosted by a remote
    /// collaborator/server; git operations are forwarded over `client`.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike the local case, remote state is available immediately.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4117
    /// The owning `GitStore`, if it is still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4121
    /// Recomputes the diff bases (index text and HEAD text) for every open
    /// buffer belonging to this repository after the underlying git state
    /// changed, and broadcasts the results to any downstream client.
    ///
    /// Scheduled as a keyed job ([`GitJobKey::ReloadBufferDiffBases`]) so
    /// that rapid-fire git updates coalesce into a single reload. Only
    /// meaningful for local repositories; for any other state the job logs
    /// an error and returns successfully.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // On the main thread: collect each open buffer in this repo
                // together with its repo path and its currently-held base
                // texts. A base text is only captured when the corresponding
                // diff is upgradable (something still holds it alive), so we
                // don't reload bases nobody is looking at.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                // Skips buffers that don't resolve into this
                                // repository's working directory.
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Off the main thread: re-read index/HEAD contents from git
                // and compare them against the captured bases to decide which
                // bases (if any) changed for each buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            // Only reload texts that were captured above.
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // Collapse the four (index?, head?) combinations
                            // into the minimal DiffBasesChange, or None when
                            // nothing changed. SetBoth is used when index and
                            // HEAD now agree, so a single text is shared.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Back on the main thread: apply each change to the buffer's
                // diff state and mirror it to the downstream client, if any.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            // Forward the new bases over RPC before applying
                            // locally; failures to send are only logged.
                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4281
4282 pub fn send_job<F, Fut, R>(
4283 &mut self,
4284 status: Option<SharedString>,
4285 job: F,
4286 ) -> oneshot::Receiver<R>
4287 where
4288 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
4289 Fut: Future<Output = R> + 'static,
4290 R: Send + 'static,
4291 {
4292 self.send_keyed_job(None, status, job)
4293 }
4294
4295 fn send_keyed_job<F, Fut, R>(
4296 &mut self,
4297 key: Option<GitJobKey>,
4298 status: Option<SharedString>,
4299 job: F,
4300 ) -> oneshot::Receiver<R>
4301 where
4302 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
4303 Fut: Future<Output = R> + 'static,
4304 R: Send + 'static,
4305 {
4306 let (result_tx, result_rx) = futures::channel::oneshot::channel();
4307 let job_id = post_inc(&mut self.job_id);
4308 let this = self.this.clone();
4309 self.job_sender
4310 .unbounded_send(GitJob {
4311 key,
4312 job: Box::new(move |state, cx: &mut AsyncApp| {
4313 let job = job(state, cx.clone());
4314 cx.spawn(async move |cx| {
4315 if let Some(s) = status.clone() {
4316 this.update(cx, |this, cx| {
4317 this.active_jobs.insert(
4318 job_id,
4319 JobInfo {
4320 start: Instant::now(),
4321 message: s.clone(),
4322 },
4323 );
4324
4325 cx.notify();
4326 })
4327 .ok();
4328 }
4329 let result = job.await;
4330
4331 this.update(cx, |this, cx| {
4332 this.active_jobs.remove(&job_id);
4333 cx.notify();
4334 })
4335 .ok();
4336
4337 result_tx.send(result).ok();
4338 })
4339 }),
4340 })
4341 .ok();
4342 result_rx
4343 }
4344
4345 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4346 let Some(git_store) = self.git_store.upgrade() else {
4347 return;
4348 };
4349 let entity = cx.entity();
4350 git_store.update(cx, |git_store, cx| {
4351 let Some((&id, _)) = git_store
4352 .repositories
4353 .iter()
4354 .find(|(_, handle)| *handle == &entity)
4355 else {
4356 return;
4357 };
4358 git_store.active_repo_id = Some(id);
4359 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4360 });
4361 }
4362
4363 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4364 self.snapshot.status()
4365 }
4366
4367 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4368 self.snapshot.diff_stat_for_path(path)
4369 }
4370
4371 pub fn cached_stash(&self) -> GitStash {
4372 self.snapshot.stash_entries.clone()
4373 }
4374
4375 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4376 let git_store = self.git_store.upgrade()?;
4377 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4378 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4379 let abs_path = SanitizedPath::new(&abs_path);
4380 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4381 Some(ProjectPath {
4382 worktree_id: worktree.read(cx).id(),
4383 path: relative_path,
4384 })
4385 }
4386
4387 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4388 let git_store = self.git_store.upgrade()?;
4389 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4390 let abs_path = worktree_store.absolutize(path, cx)?;
4391 self.snapshot.abs_path_to_repo_path(&abs_path)
4392 }
4393
4394 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4395 other
4396 .read(cx)
4397 .snapshot
4398 .work_directory_abs_path
4399 .starts_with(&self.snapshot.work_directory_abs_path)
4400 }
4401
    /// Returns this repository's commit-message buffer, creating it on first
    /// use and caching it in `commit_message_buffer`.
    ///
    /// For local repositories the buffer is created locally via
    /// [`Self::open_local_commit_buffer`]; for remote repositories the buffer
    /// is opened on the host over RPC and mirrored here. When a language
    /// registry is provided, the buffer is tagged with the "Git Commit"
    /// language.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: the buffer was already created by a previous call.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Ask the host to open (or create) its commit buffer,
                    // then wait for it to be replicated into our store.
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache the replicated buffer for subsequent calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4454
4455 fn open_local_commit_buffer(
4456 language_registry: Option<Arc<LanguageRegistry>>,
4457 buffer_store: Entity<BufferStore>,
4458 cx: &mut Context<Self>,
4459 ) -> Task<Result<Entity<Buffer>>> {
4460 cx.spawn(async move |repository, cx| {
4461 let git_commit_language = match language_registry {
4462 Some(language_registry) => {
4463 Some(language_registry.language_for_name("Git Commit").await?)
4464 }
4465 None => None,
4466 };
4467 let buffer = buffer_store
4468 .update(cx, |buffer_store, cx| {
4469 buffer_store.create_buffer(git_commit_language, false, cx)
4470 })
4471 .await?;
4472
4473 repository.update(cx, |repository, _| {
4474 repository.commit_message_buffer = Some(buffer.clone());
4475 })?;
4476 Ok(buffer)
4477 })
4478 }
4479
    /// Restores `paths` to their contents at `commit` (the equivalent of
    /// `git checkout <commit> -- <paths>`).
    ///
    /// The affected paths are tracked as pending
    /// [`pending_op::GitStatus::Reverted`] operations for the duration of
    /// the job, and the job is surfaced with a "git checkout …" status
    /// message. Remote repositories proxy the request to the host.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4534
4535 pub fn reset(
4536 &mut self,
4537 commit: String,
4538 reset_mode: ResetMode,
4539 _cx: &mut App,
4540 ) -> oneshot::Receiver<Result<()>> {
4541 let id = self.id;
4542
4543 self.send_job(None, move |git_repo, _| async move {
4544 match git_repo {
4545 RepositoryState::Local(LocalRepositoryState {
4546 backend,
4547 environment,
4548 ..
4549 }) => backend.reset(commit, reset_mode, environment).await,
4550 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4551 client
4552 .request(proto::GitReset {
4553 project_id: project_id.0,
4554 repository_id: id.to_proto(),
4555 commit,
4556 mode: match reset_mode {
4557 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4558 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4559 },
4560 })
4561 .await?;
4562
4563 Ok(())
4564 }
4565 }
4566 })
4567 }
4568
4569 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4570 let id = self.id;
4571 self.send_job(None, move |git_repo, _cx| async move {
4572 match git_repo {
4573 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4574 backend.show(commit).await
4575 }
4576 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4577 let resp = client
4578 .request(proto::GitShow {
4579 project_id: project_id.0,
4580 repository_id: id.to_proto(),
4581 commit,
4582 })
4583 .await?;
4584
4585 Ok(CommitDetails {
4586 sha: resp.sha.into(),
4587 message: resp.message.into(),
4588 commit_timestamp: resp.commit_timestamp,
4589 author_email: resp.author_email.into(),
4590 author_name: resp.author_name.into(),
4591 })
4592 }
4593 }
4594 })
4595 }
4596
    /// Loads the diff introduced by `commit`: for each touched file, its
    /// old and new text (or a binary marker).
    ///
    /// Local repositories compute the diff via the backend; remote ones
    /// fetch the per-file contents from the host and reassemble them here.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        // Fallible collect: a single malformed path fails the
                        // whole diff rather than silently dropping a file.
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4632
4633 pub fn file_history(
4634 &mut self,
4635 path: RepoPath,
4636 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4637 self.file_history_paginated(path, 0, None)
4638 }
4639
    /// Loads the commit history of `path`, skipping the first `skip` entries
    /// and returning at most `limit` entries (unbounded when `None`).
    ///
    /// Remote repositories fetch the page from the host and convert the
    /// proto entries back into [`git::repository::FileHistoryEntry`] values.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        // The host echoes the (possibly canonicalized) path.
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4681
4682 pub fn get_graph_data(
4683 &self,
4684 log_source: LogSource,
4685 log_order: LogOrder,
4686 ) -> Option<&InitialGitGraphData> {
4687 self.initial_graph_data.get(&(log_source, log_order))
4688 }
4689
4690 pub fn search_commits(
4691 &mut self,
4692 log_source: LogSource,
4693 search_args: SearchCommitArgs,
4694 request_tx: smol::channel::Sender<Oid>,
4695 cx: &mut Context<Self>,
4696 ) {
4697 let repository_state = self.repository_state.clone();
4698
4699 cx.background_spawn(async move {
4700 let repo_state = repository_state.await;
4701
4702 match repo_state {
4703 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4704 backend
4705 .search_commits(log_source, search_args, request_tx)
4706 .await
4707 .log_err();
4708 }
4709 Ok(RepositoryState::Remote(_)) => {}
4710 Err(_) => {}
4711 };
4712 })
4713 .detach();
4714 }
4715
    /// Returns the commit-graph rows covering `range` for the given log
    /// source and order, starting a background fetch on first access.
    ///
    /// The response also reports whether the fetch is still in flight and
    /// any error it produced. `range` is clamped to the rows loaded so far,
    /// so callers may request past the current end while data streams in.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        // First access for this (source, order) pair spawns the fetch task;
        // the task's lifetime is tied to the entry, so removing the entry
        // cancels the fetch.
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record a fetch failure on the entry so the UI can show
                    // it; the entry must still exist because dropping it
                    // would have dropped this task.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to what has actually been loaded.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4785
    /// Streams the initial commit-graph rows for a local repository from the
    /// backend into `initial_graph_data`.
    ///
    /// Rows arrive in batches over a channel from a background task; each
    /// batch is appended (with its oid→index mapping) and a
    /// [`RepositoryEvent::GraphEvent`] count update is emitted. Returns the
    /// backend task's final result once the stream closes.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // The backend walks the log on a background thread, pushing batches
        // of rows into `request_tx`; its terminal result is awaited below.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches until the backend drops its sender.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                // The entry owns this task, so a missing entry means the
                // task should already have been dropped — flag it in debug.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4839
4840 pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
4841 if !self.commit_data.contains_key(&sha) {
4842 match &self.graph_commit_data_handler {
4843 GraphCommitHandlerState::Open(handler) => {
4844 if handler.commit_data_request.try_send(sha).is_ok() {
4845 let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
4846 debug_assert!(old_value.is_none(), "We should never overwrite commit data");
4847 }
4848 }
4849 GraphCommitHandlerState::Closed => {
4850 self.open_graph_commit_data_handler(cx);
4851 }
4852 GraphCommitHandlerState::Starting => {}
4853 }
4854 }
4855
4856 self.commit_data
4857 .get(&sha)
4858 .unwrap_or(&CommitDataState::Loading)
4859 }
4860
    /// Spins up the pair of tasks that service commit-data requests for the
    /// git graph.
    ///
    /// A background task owns the backend reader: it pulls requested oids
    /// from `request_tx`, reads the commit data, and pushes results into a
    /// bounded channel; it shuts itself down after 10 seconds of inactivity.
    /// A foreground task drains the results into `commit_data` and notifies
    /// the UI, flipping the handler back to `Closed` when the stream ends.
    /// Only supported for local repositories.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        // Foreground: apply results to the entity until the background task
        // drops `result_tx`, then mark the handler closed so a later request
        // can restart it.
        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        // Background: resolve the backend, create the reader, and serve
        // requests until the channel closes or the idle timeout fires.
        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // The idle timer is re-armed on every iteration; a request
                // winning the race keeps the reader alive.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // A single unreadable commit is logged and
                                // skipped; the handler keeps serving.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task finish
            // and mark the handler as closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4951
4952 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4953 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4954 }
4955
4956 fn save_buffers<'a>(
4957 &self,
4958 entries: impl IntoIterator<Item = &'a RepoPath>,
4959 cx: &mut Context<Self>,
4960 ) -> Vec<Task<anyhow::Result<()>>> {
4961 let mut save_futures = Vec::new();
4962 if let Some(buffer_store) = self.buffer_store(cx) {
4963 buffer_store.update(cx, |buffer_store, cx| {
4964 for path in entries {
4965 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4966 continue;
4967 };
4968 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4969 && buffer
4970 .read(cx)
4971 .file()
4972 .is_some_and(|file| file.disk_state().exists())
4973 && buffer.read(cx).has_unsaved_edits()
4974 {
4975 save_futures.push(buffer_store.save_buffer(buffer, cx));
4976 }
4977 }
4978 })
4979 }
4980 save_futures
4981 }
4982
4983 pub fn stage_entries(
4984 &mut self,
4985 entries: Vec<RepoPath>,
4986 cx: &mut Context<Self>,
4987 ) -> Task<anyhow::Result<()>> {
4988 self.stage_or_unstage_entries(true, entries, cx)
4989 }
4990
4991 pub fn unstage_entries(
4992 &mut self,
4993 entries: Vec<RepoPath>,
4994 cx: &mut Context<Self>,
4995 ) -> Task<anyhow::Result<()>> {
4996 self.stage_or_unstage_entries(false, entries, cx)
4997 }
4998
    /// Stages (`stage == true`) or unstages the given paths, keeping the
    /// in-memory hunk state of any open uncommitted diffs in sync with the
    /// index write.
    ///
    /// Dirty buffers for the affected paths are saved first so the staged
    /// contents match the editor. While the index write is in flight, all
    /// hunks of each affected open diff are optimistically marked
    /// staged/unstaged; on failure those pending hunk states are cleared.
    /// Keyed with [`GitJobKey::WriteIndex`] over the entry set, and tracked
    /// as pending staged/unstaged operations for the UI.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status message shown while the job runs.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush unsaved edits before touching the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip all hunks of each affected
                            // open uncommitted diff, remembering each diff
                            // state's operation counter so we can later tell
                            // whether our write is the latest one.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write, locally or via
                            // the host for remote repositories.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // On success, record which operation the write
                            // corresponds to; on failure, roll back the
                            // optimistic hunk states.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5183
5184 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5185 let snapshot = self.snapshot.clone();
5186 let pending_ops = self.pending_ops.clone();
5187 let to_stage = cx.background_spawn(async move {
5188 snapshot
5189 .status()
5190 .filter_map(|entry| {
5191 if let Some(ops) =
5192 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5193 {
5194 if ops.staging() || ops.staged() {
5195 None
5196 } else {
5197 Some(entry.repo_path)
5198 }
5199 } else if entry.status.staging().is_fully_staged() {
5200 None
5201 } else {
5202 Some(entry.repo_path)
5203 }
5204 })
5205 .collect()
5206 });
5207
5208 cx.spawn(async move |this, cx| {
5209 let to_stage = to_stage.await;
5210 this.update(cx, |this, cx| {
5211 this.stage_or_unstage_entries(true, to_stage, cx)
5212 })?
5213 .await
5214 })
5215 }
5216
5217 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5218 let snapshot = self.snapshot.clone();
5219 let pending_ops = self.pending_ops.clone();
5220 let to_unstage = cx.background_spawn(async move {
5221 snapshot
5222 .status()
5223 .filter_map(|entry| {
5224 if let Some(ops) =
5225 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5226 {
5227 if !ops.staging() && !ops.staged() {
5228 None
5229 } else {
5230 Some(entry.repo_path)
5231 }
5232 } else if entry.status.staging().is_fully_unstaged() {
5233 None
5234 } else {
5235 Some(entry.repo_path)
5236 }
5237 })
5238 .collect()
5239 });
5240
5241 cx.spawn(async move |this, cx| {
5242 let to_unstage = to_unstage.await;
5243 this.update(cx, |this, cx| {
5244 this.stage_or_unstage_entries(false, to_unstage, cx)
5245 })?
5246 .await
5247 })
5248 }
5249
5250 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5251 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5252
5253 self.stash_entries(to_stash, cx)
5254 }
5255
5256 pub fn stash_entries(
5257 &mut self,
5258 entries: Vec<RepoPath>,
5259 cx: &mut Context<Self>,
5260 ) -> Task<anyhow::Result<()>> {
5261 let id = self.id;
5262
5263 cx.spawn(async move |this, cx| {
5264 this.update(cx, |this, _| {
5265 this.send_job(None, move |git_repo, _cx| async move {
5266 match git_repo {
5267 RepositoryState::Local(LocalRepositoryState {
5268 backend,
5269 environment,
5270 ..
5271 }) => backend.stash_paths(entries, environment).await,
5272 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5273 client
5274 .request(proto::Stash {
5275 project_id: project_id.0,
5276 repository_id: id.to_proto(),
5277 paths: entries
5278 .into_iter()
5279 .map(|repo_path| repo_path.to_proto())
5280 .collect(),
5281 })
5282 .await?;
5283 Ok(())
5284 }
5285 }
5286 })
5287 })?
5288 .await??;
5289 Ok(())
5290 })
5291 }
5292
    /// Pops a stash entry via the local backend or the remote host. `index`
    /// selects a specific entry; `None` uses the backend's default entry.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    // Stash indices cross the wire as u64.
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5326
    /// Applies a stash entry via the local backend or the remote host. `index`
    /// selects a specific entry; `None` uses the backend's default entry.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    // Stash indices cross the wire as u64.
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5360
5361 pub fn stash_drop(
5362 &mut self,
5363 index: Option<usize>,
5364 cx: &mut Context<Self>,
5365 ) -> oneshot::Receiver<anyhow::Result<()>> {
5366 let id = self.id;
5367 let updates_tx = self
5368 .git_store()
5369 .and_then(|git_store| match &git_store.read(cx).state {
5370 GitStoreState::Local { downstream, .. } => downstream
5371 .as_ref()
5372 .map(|downstream| downstream.updates_tx.clone()),
5373 _ => None,
5374 });
5375 let this = cx.weak_entity();
5376 self.send_job(None, move |git_repo, mut cx| async move {
5377 match git_repo {
5378 RepositoryState::Local(LocalRepositoryState {
5379 backend,
5380 environment,
5381 ..
5382 }) => {
5383 // TODO would be nice to not have to do this manually
5384 let result = backend.stash_drop(index, environment).await;
5385 if result.is_ok()
5386 && let Ok(stash_entries) = backend.stash_entries().await
5387 {
5388 let snapshot = this.update(&mut cx, |this, cx| {
5389 this.snapshot.stash_entries = stash_entries;
5390 cx.emit(RepositoryEvent::StashEntriesChanged);
5391 this.snapshot.clone()
5392 })?;
5393 if let Some(updates_tx) = updates_tx {
5394 updates_tx
5395 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5396 .ok();
5397 }
5398 }
5399
5400 result
5401 }
5402 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5403 client
5404 .request(proto::StashDrop {
5405 project_id: project_id.0,
5406 repository_id: id.to_proto(),
5407 stash_index: index.map(|i| i as u64),
5408 })
5409 .await
5410 .context("sending stash pop request")?;
5411 Ok(())
5412 }
5413 }
5414 })
5415 }
5416
5417 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5418 let id = self.id;
5419 self.send_job(
5420 Some(format!("git hook {}", hook.as_str()).into()),
5421 move |git_repo, _cx| async move {
5422 match git_repo {
5423 RepositoryState::Local(LocalRepositoryState {
5424 backend,
5425 environment,
5426 ..
5427 }) => backend.run_hook(hook, environment.clone()).await,
5428 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5429 client
5430 .request(proto::RunGitHook {
5431 project_id: project_id.0,
5432 repository_id: id.to_proto(),
5433 hook: hook.to_proto(),
5434 })
5435 .await?;
5436
5437 Ok(())
5438 }
5439 }
5440 },
5441 )
5442 }
5443
    /// Creates a commit with `message`, an optional author override, and the
    /// given options, relaying credential prompts through `askpass`.
    ///
    /// The pre-commit hook job is queued first and the commit job awaits its
    /// receiver, so a failing hook aborts the commit.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        // Unique id identifying this askpass delegate for the duration of the
        // remote request.
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook before the commit job itself.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the pre-commit hook failed (or its channel was dropped).
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate when the request completes,
                    // success or failure.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5499
    /// Fetches according to `fetch_options`, returning the command's
    /// stdout/stderr output on success.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        // Unique id identifying this askpass delegate for the duration of the
        // remote request.
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate when the request completes,
                    // success or failure.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5541
5542 pub fn push(
5543 &mut self,
5544 branch: SharedString,
5545 remote_branch: SharedString,
5546 remote: SharedString,
5547 options: Option<PushOptions>,
5548 askpass: AskPassDelegate,
5549 cx: &mut Context<Self>,
5550 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5551 let askpass_delegates = self.askpass_delegates.clone();
5552 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5553 let id = self.id;
5554
5555 let args = options
5556 .map(|option| match option {
5557 PushOptions::SetUpstream => " --set-upstream",
5558 PushOptions::Force => " --force-with-lease",
5559 })
5560 .unwrap_or("");
5561
5562 let updates_tx = self
5563 .git_store()
5564 .and_then(|git_store| match &git_store.read(cx).state {
5565 GitStoreState::Local { downstream, .. } => downstream
5566 .as_ref()
5567 .map(|downstream| downstream.updates_tx.clone()),
5568 _ => None,
5569 });
5570
5571 let this = cx.weak_entity();
5572 self.send_job(
5573 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5574 move |git_repo, mut cx| async move {
5575 match git_repo {
5576 RepositoryState::Local(LocalRepositoryState {
5577 backend,
5578 environment,
5579 ..
5580 }) => {
5581 let result = backend
5582 .push(
5583 branch.to_string(),
5584 remote_branch.to_string(),
5585 remote.to_string(),
5586 options,
5587 askpass,
5588 environment.clone(),
5589 cx.clone(),
5590 )
5591 .await;
5592 // TODO would be nice to not have to do this manually
5593 if result.is_ok() {
5594 let branches = backend.branches().await?;
5595 let branch = branches.into_iter().find(|branch| branch.is_head);
5596 log::info!("head branch after scan is {branch:?}");
5597 let snapshot = this.update(&mut cx, |this, cx| {
5598 this.snapshot.branch = branch;
5599 cx.emit(RepositoryEvent::BranchChanged);
5600 this.snapshot.clone()
5601 })?;
5602 if let Some(updates_tx) = updates_tx {
5603 updates_tx
5604 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5605 .ok();
5606 }
5607 }
5608 result
5609 }
5610 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5611 askpass_delegates.lock().insert(askpass_id, askpass);
5612 let _defer = util::defer(|| {
5613 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5614 debug_assert!(askpass_delegate.is_some());
5615 });
5616 let response = client
5617 .request(proto::Push {
5618 project_id: project_id.0,
5619 repository_id: id.to_proto(),
5620 askpass_id,
5621 branch_name: branch.to_string(),
5622 remote_branch_name: remote_branch.to_string(),
5623 remote_name: remote.to_string(),
5624 options: options.map(|options| match options {
5625 PushOptions::Force => proto::push::PushOptions::Force,
5626 PushOptions::SetUpstream => {
5627 proto::push::PushOptions::SetUpstream
5628 }
5629 }
5630 as i32),
5631 })
5632 .await?;
5633
5634 Ok(RemoteCommandOutput {
5635 stdout: response.stdout,
5636 stderr: response.stderr,
5637 })
5638 }
5639 }
5640 },
5641 )
5642 }
5643
    /// Pulls from `remote`, optionally a specific `branch`, with or without
    /// `--rebase`, returning the command's output.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        // Unique id identifying this askpass delegate for the duration of the
        // remote request.
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build a human-readable status mirroring the underlying command,
        // e.g. "git pull --rebase origin main".
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate when the request completes,
                    // success or failure.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5708
    /// Queues a job that writes `content` as the index (staged) text for
    /// `path` (`None` presumably removes the index entry — confirm against
    /// the backend's `set_index_text` semantics).
    ///
    /// After a successful write, `hunk_staging_operation_count` (when given)
    /// is recorded on the corresponding buffer's diff state so later diff
    /// updates can tell which staging operations this write reflects.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        // Keyed on the path; see `send_keyed_job` for how keyed jobs are
        // scheduled relative to one another.
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the file's executable bit in the index;
                        // default to non-executable if metadata is missing or
                        // unreadable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    // Record the operation count on the open buffer's diff
                    // state (no-op if the buffer or diff state is absent —
                    // the `?`s inside the closure short-circuit to `Some(())`
                    // being skipped, not to an error).
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5787
5788 pub fn create_remote(
5789 &mut self,
5790 remote_name: String,
5791 remote_url: String,
5792 ) -> oneshot::Receiver<Result<()>> {
5793 let id = self.id;
5794 self.send_job(
5795 Some(format!("git remote add {remote_name} {remote_url}").into()),
5796 move |repo, _cx| async move {
5797 match repo {
5798 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5799 backend.create_remote(remote_name, remote_url).await
5800 }
5801 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5802 client
5803 .request(proto::GitCreateRemote {
5804 project_id: project_id.0,
5805 repository_id: id.to_proto(),
5806 remote_name,
5807 remote_url,
5808 })
5809 .await?;
5810
5811 Ok(())
5812 }
5813 }
5814 },
5815 )
5816 }
5817
5818 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5819 let id = self.id;
5820 self.send_job(
5821 Some(format!("git remove remote {remote_name}").into()),
5822 move |repo, _cx| async move {
5823 match repo {
5824 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5825 backend.remove_remote(remote_name).await
5826 }
5827 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5828 client
5829 .request(proto::GitRemoveRemote {
5830 project_id: project_id.0,
5831 repository_id: id.to_proto(),
5832 remote_name,
5833 })
5834 .await?;
5835
5836 Ok(())
5837 }
5838 }
5839 },
5840 )
5841 }
5842
5843 pub fn get_remotes(
5844 &mut self,
5845 branch_name: Option<String>,
5846 is_push: bool,
5847 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5848 let id = self.id;
5849 self.send_job(None, move |repo, _cx| async move {
5850 match repo {
5851 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5852 let remote = if let Some(branch_name) = branch_name {
5853 if is_push {
5854 backend.get_push_remote(branch_name).await?
5855 } else {
5856 backend.get_branch_remote(branch_name).await?
5857 }
5858 } else {
5859 None
5860 };
5861
5862 match remote {
5863 Some(remote) => Ok(vec![remote]),
5864 None => backend.get_all_remotes().await,
5865 }
5866 }
5867 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5868 let response = client
5869 .request(proto::GetRemotes {
5870 project_id: project_id.0,
5871 repository_id: id.to_proto(),
5872 branch_name,
5873 is_push,
5874 })
5875 .await?;
5876
5877 let remotes = response
5878 .remotes
5879 .into_iter()
5880 .map(|remotes| Remote {
5881 name: remotes.name.into(),
5882 })
5883 .collect();
5884
5885 Ok(remotes)
5886 }
5887 }
5888 })
5889 }
5890
5891 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5892 let id = self.id;
5893 self.send_job(None, move |repo, _| async move {
5894 match repo {
5895 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5896 backend.branches().await
5897 }
5898 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5899 let response = client
5900 .request(proto::GitGetBranches {
5901 project_id: project_id.0,
5902 repository_id: id.to_proto(),
5903 })
5904 .await?;
5905
5906 let branches = response
5907 .branches
5908 .into_iter()
5909 .map(|branch| proto_to_branch(&branch))
5910 .collect();
5911
5912 Ok(branches)
5913 }
5914 }
5915 })
5916 }
5917
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree's working directory differs from the original
        // (main) repo path; equality means this *is* the main checkout.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5929
5930 pub fn path_for_new_linked_worktree(
5931 &self,
5932 branch_name: &str,
5933 worktree_directory_setting: &str,
5934 ) -> Result<PathBuf> {
5935 let original_repo = self.original_repo_abs_path.clone();
5936 let project_name = original_repo
5937 .file_name()
5938 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5939 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5940 Ok(directory.join(branch_name).join(project_name))
5941 }
5942
    /// Lists the git worktrees associated with this repository.
    pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.worktrees().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetWorktrees {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    // Convert wire representations back to `GitWorktree`s.
                    let worktrees = response
                        .worktrees
                        .into_iter()
                        .map(|worktree| proto_to_worktree(&worktree))
                        .collect();

                    Ok(worktrees)
                }
            }
        })
    }
5969
    /// Adds a linked git worktree at `path` on a new branch `branch_name`,
    /// optionally starting from `commit`.
    pub fn create_worktree(
        &mut self,
        branch_name: String,
        path: PathBuf,
        commit: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree add: {}", branch_name).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend
                            .create_worktree(Some(branch_name), path, commit)
                            .await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitCreateWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                name: branch_name,
                                directory: path.to_string_lossy().to_string(),
                                commit,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6003
6004 pub fn create_worktree_detached(
6005 &mut self,
6006 path: PathBuf,
6007 commit: String,
6008 ) -> oneshot::Receiver<Result<()>> {
6009 self.send_job(
6010 Some("git worktree add (detached)".into()),
6011 move |repo, _cx| async move {
6012 match repo {
6013 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6014 backend.create_worktree(None, path, Some(commit)).await
6015 }
6016 RepositoryState::Remote(_) => {
6017 anyhow::bail!(
6018 "create_worktree_detached is not supported for remote repositories"
6019 )
6020 }
6021 }
6022 },
6023 )
6024 }
6025
6026 pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
6027 self.send_job(None, move |repo, _cx| async move {
6028 match repo {
6029 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6030 Ok(backend.head_sha().await)
6031 }
6032 RepositoryState::Remote(_) => {
6033 anyhow::bail!("head_sha is not supported for remote repositories")
6034 }
6035 }
6036 })
6037 }
6038
6039 pub fn update_ref(
6040 &mut self,
6041 ref_name: String,
6042 commit: String,
6043 ) -> oneshot::Receiver<Result<()>> {
6044 self.send_job(None, move |repo, _cx| async move {
6045 match repo {
6046 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6047 backend.update_ref(ref_name, commit).await
6048 }
6049 RepositoryState::Remote(_) => {
6050 anyhow::bail!("update_ref is not supported for remote repositories")
6051 }
6052 }
6053 })
6054 }
6055
6056 pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
6057 self.send_job(None, move |repo, _cx| async move {
6058 match repo {
6059 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6060 backend.delete_ref(ref_name).await
6061 }
6062 RepositoryState::Remote(_) => {
6063 anyhow::bail!("delete_ref is not supported for remote repositories")
6064 }
6065 }
6066 })
6067 }
6068
6069 pub fn stage_all_including_untracked(&mut self) -> oneshot::Receiver<Result<()>> {
6070 self.send_job(None, move |repo, _cx| async move {
6071 match repo {
6072 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6073 backend.stage_all_including_untracked().await
6074 }
6075 RepositoryState::Remote(_) => {
6076 anyhow::bail!(
6077 "stage_all_including_untracked is not supported for remote repositories"
6078 )
6079 }
6080 }
6081 })
6082 }
6083
    /// Removes the linked worktree at `path`. `force` is forwarded to the
    /// backend (cf. `git worktree remove --force` — confirm exact semantics
    /// against the backend implementation).
    pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree remove: {}", path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.remove_worktree(path, force).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRemoveWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_string_lossy().to_string(),
                                force,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6109
    /// Moves a linked worktree from `old_path` to `new_path`.
    pub fn rename_worktree(
        &mut self,
        old_path: PathBuf,
        new_path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree move: {}", old_path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_worktree(old_path, new_path).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                old_path: old_path.to_string_lossy().to_string(),
                                new_path: new_path.to_string_lossy().to_string(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6139
    /// Resolves the repository's default branch name, if one can be
    /// determined.
    ///
    /// NOTE(review): `include_remote_name` is only passed to the local
    /// backend; the `GetDefaultBranch` request does not carry it, so remote
    /// repositories use the host's default formatting — confirm intended.
    pub fn default_branch(
        &mut self,
        include_remote_name: bool,
    ) -> oneshot::Receiver<Result<Option<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.default_branch(include_remote_name).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GetDefaultBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    anyhow::Ok(response.branch.map(SharedString::from))
                }
            }
        })
    }
6163
    /// Computes a tree-level diff (per-path added/modified/deleted statuses)
    /// for the given `diff_type`.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Convert wire statuses back into `TreeDiffStatus`. Added
                    // entries carry no oid; Modified/Deleted must carry the
                    // old blob's oid. Malformed entries (missing/unparsable
                    // oid or path) are logged and skipped via `filter_map`.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6223
    /// Produces a textual diff for the given `diff_type` (head↔index,
    /// head↔worktree, or against a merge base).
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Map the diff type to its wire form; only `MergeBase`
                    // carries an extra ref.
                    let (proto_diff_type, merge_base_ref) = match &diff_type {
                        DiffType::HeadToIndex => {
                            (proto::git_diff::DiffType::HeadToIndex.into(), None)
                        }
                        DiffType::HeadToWorktree => {
                            (proto::git_diff::DiffType::HeadToWorktree.into(), None)
                        }
                        DiffType::MergeBase { base_ref } => (
                            proto::git_diff::DiffType::MergeBase.into(),
                            Some(base_ref.to_string()),
                        ),
                    };
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            diff_type: proto_diff_type,
                            merge_base_ref,
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
6258
    /// Creates (and switches to) branch `branch_name`, optionally based on
    /// `base_branch` (`git switch -c`).
    ///
    /// NOTE(review): `base_branch` is only honored for local repositories —
    /// the `GitCreateBranch` request does not carry it — confirm this is
    /// intentional.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6289
6290 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6291 let id = self.id;
6292 self.send_job(
6293 Some(format!("git switch {branch_name}").into()),
6294 move |repo, _cx| async move {
6295 match repo {
6296 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6297 backend.change_branch(branch_name).await
6298 }
6299 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6300 client
6301 .request(proto::GitChangeBranch {
6302 project_id: project_id.0,
6303 repository_id: id.to_proto(),
6304 branch_name,
6305 })
6306 .await?;
6307
6308 Ok(())
6309 }
6310 }
6311 },
6312 )
6313 }
6314
6315 pub fn delete_branch(
6316 &mut self,
6317 is_remote: bool,
6318 branch_name: String,
6319 ) -> oneshot::Receiver<Result<()>> {
6320 let id = self.id;
6321 self.send_job(
6322 Some(
6323 format!(
6324 "git branch {} {}",
6325 if is_remote { "-dr" } else { "-d" },
6326 branch_name
6327 )
6328 .into(),
6329 ),
6330 move |repo, _cx| async move {
6331 match repo {
6332 RepositoryState::Local(state) => {
6333 state.backend.delete_branch(is_remote, branch_name).await
6334 }
6335 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6336 client
6337 .request(proto::GitDeleteBranch {
6338 project_id: project_id.0,
6339 repository_id: id.to_proto(),
6340 is_remote,
6341 branch_name,
6342 })
6343 .await?;
6344
6345 Ok(())
6346 }
6347 }
6348 },
6349 )
6350 }
6351
    /// Renames `branch` to `new_name` (`git branch -m`).
    pub fn rename_branch(
        &mut self,
        branch: String,
        new_name: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git branch -m {branch} {new_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_branch(branch, new_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                branch,
                                new_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6381
    /// Asks the backend/host which refs the current commits have been pushed
    /// to; see the backend's `check_for_pushed_commit` for exact semantics.
    pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.check_for_pushed_commit().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::CheckForPushedCommits {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response.pushed_to.into_iter().map(Into::into).collect();

                    Ok(branches)
                }
            }
        })
    }
6404
    /// Captures a checkpoint of the repository state, identified by a commit
    /// sha that can later be passed to [`Self::restore_checkpoint`].
    pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.checkpoint().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitCreateCheckpoint {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    // The sha crosses the wire as raw bytes.
                    Ok(GitRepositoryCheckpoint {
                        commit_sha: Oid::from_bytes(&response.commit_sha)?,
                    })
                }
            }
        })
    }
6427
6428 pub fn restore_checkpoint(
6429 &mut self,
6430 checkpoint: GitRepositoryCheckpoint,
6431 ) -> oneshot::Receiver<Result<()>> {
6432 let id = self.id;
6433 self.send_job(None, move |repo, _cx| async move {
6434 match repo {
6435 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6436 backend.restore_checkpoint(checkpoint).await
6437 }
6438 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6439 client
6440 .request(proto::GitRestoreCheckpoint {
6441 project_id: project_id.0,
6442 repository_id: id.to_proto(),
6443 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6444 })
6445 .await?;
6446 Ok(())
6447 }
6448 }
6449 })
6450 }
6451
    /// Applies an `UpdateRepository` message received from the upstream (host)
    /// project to this downstream replica's snapshot, emitting a
    /// `RepositoryEvent` for each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        // Compare before overwriting so the event fires only on real changes.
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to deserialize are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Apply status removals and insertions to the status tree as a single
        // batch of edits; entries that fail to parse are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only adopt the upstream's scan id once the final message of a batch
        // has arrived.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6535
6536 pub fn compare_checkpoints(
6537 &mut self,
6538 left: GitRepositoryCheckpoint,
6539 right: GitRepositoryCheckpoint,
6540 ) -> oneshot::Receiver<Result<bool>> {
6541 let id = self.id;
6542 self.send_job(None, move |repo, _cx| async move {
6543 match repo {
6544 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6545 backend.compare_checkpoints(left, right).await
6546 }
6547 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6548 let response = client
6549 .request(proto::GitCompareCheckpoints {
6550 project_id: project_id.0,
6551 repository_id: id.to_proto(),
6552 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6553 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6554 })
6555 .await?;
6556 Ok(response.equal)
6557 }
6558 }
6559 })
6560 }
6561
    /// Produces a textual diff between two checkpoints (`base` → `target`).
    ///
    /// Local repositories compute the diff via the git backend; remote
    /// repositories request it from the host project over RPC.
    pub fn diff_checkpoints(
        &mut self,
        base_checkpoint: GitRepositoryCheckpoint,
        target_checkpoint: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend
                        .diff_checkpoints(base_checkpoint, target_checkpoint)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitDiffCheckpoints {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
                            target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
                        })
                        .await?;
                    Ok(response.diff)
                }
            }
        })
    }
6589
    /// Prunes the pending-ops tree, keeping only paths that still have at
    /// least one running op and discarding finished/skipped/errored ones.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree from scratch, retaining only still-running ops.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *previous* pending ops, not
            // the pruned `updated` set — confirm subscribers expect the
            // pre-clear state here rather than the new one.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6615
    /// Schedules a full git status rescan on the worker queue.
    ///
    /// The job is keyed with `GitJobKey::ReloadGitState`, so the worker will
    /// skip this job if another scan request is already queued behind it —
    /// only the most recent of a burst of scans does any work.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been dropped in the meantime.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans can only run against a local git backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                // Forward the fresh snapshot to downstream replicas, if any.
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6647
    /// Spawns the job-processing loop for a local repository and returns the
    /// sender used to enqueue `GitJob`s onto it.
    ///
    /// Jobs run one at a time, in FIFO order. A keyed job is skipped when a
    /// newer job with the same key is already waiting in the queue, so only
    /// the latest of a burst of identical requests does any work.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // processing any jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain all jobs that are already queued, without blocking.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job if a newer job with the same key is
                    // still queued behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue is empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders were dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6693
6694 fn spawn_remote_git_worker(
6695 state: RemoteRepositoryState,
6696 cx: &mut Context<Self>,
6697 ) -> mpsc::UnboundedSender<GitJob> {
6698 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
6699
6700 cx.spawn(async move |_, cx| {
6701 let state = RepositoryState::Remote(state);
6702 let mut jobs = VecDeque::new();
6703 loop {
6704 while let Ok(Some(next_job)) = job_rx.try_next() {
6705 jobs.push_back(next_job);
6706 }
6707
6708 if let Some(job) = jobs.pop_front() {
6709 if let Some(current_key) = &job.key
6710 && jobs
6711 .iter()
6712 .any(|other_job| other_job.key.as_ref() == Some(current_key))
6713 {
6714 continue;
6715 }
6716 (job.job)(state.clone(), cx).await;
6717 } else if let Some(job) = job_rx.next().await {
6718 jobs.push_back(job);
6719 } else {
6720 break;
6721 }
6722 }
6723 anyhow::Ok(())
6724 })
6725 .detach_and_log_err(cx);
6726
6727 job_tx
6728 }
6729
6730 fn load_staged_text(
6731 &mut self,
6732 buffer_id: BufferId,
6733 repo_path: RepoPath,
6734 cx: &App,
6735 ) -> Task<Result<Option<String>>> {
6736 let rx = self.send_job(None, move |state, _| async move {
6737 match state {
6738 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6739 anyhow::Ok(backend.load_index_text(repo_path).await)
6740 }
6741 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6742 let response = client
6743 .request(proto::OpenUnstagedDiff {
6744 project_id: project_id.to_proto(),
6745 buffer_id: buffer_id.to_proto(),
6746 })
6747 .await?;
6748 Ok(response.staged_text)
6749 }
6750 }
6751 });
6752 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6753 }
6754
    /// Loads both the HEAD and index versions of `repo_path`, collapsing them
    /// into a single `DiffBasesChange`: `SetBoth` when the index matches
    /// HEAD, `SetEach` otherwise.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // Collapse to a single base when index and HEAD agree, so
                    // consumers can share one diff base buffer.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The host performs the same index-vs-HEAD collapse and
                    // reports which case applies via `mode`.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6800
6801 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6802 let repository_id = self.snapshot.id;
6803 let rx = self.send_job(None, move |state, _| async move {
6804 match state {
6805 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6806 backend.load_blob_content(oid).await
6807 }
6808 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6809 let response = client
6810 .request(proto::GetBlobContent {
6811 project_id: project_id.to_proto(),
6812 repository_id: repository_id.0,
6813 oid: oid.to_string(),
6814 })
6815 .await?;
6816 Ok(response.content)
6817 }
6818 }
6819 });
6820 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6821 }
6822
    /// Queues a refresh of git statuses, diff stats, and stash entries for
    /// the given `paths`, coalescing with any refresh already queued via the
    /// `RefreshStatuses` job key.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of every batch of paths accumulated so far,
                // so one job services all queued requests.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // De-duplicate and sort the requested paths.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        // Diff stats require a HEAD commit; on an unborn
                        // branch, substitute an empty result instead.
                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Emit an insert edit only for paths whose status or
                        // diff stat actually differs from the prior snapshot.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Any requested path git reported no status for is now
                        // clean; remove its previous entry if one existed.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    // Propagate the updated snapshot to downstream replicas.
                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6935
    /// Returns information about a currently running git command (including
    /// when it started), if any. If multiple jobs are active, an arbitrary
    /// one is returned.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }
6940
    /// Enqueues a no-op job on the git worker queue and returns a receiver
    /// that resolves once the worker reaches it — i.e. after all previously
    /// queued jobs have been processed. Useful as a synchronization barrier.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
6944
    /// Runs `f` while recording a pending op (with the given `git_status`)
    /// for each of `paths`, so in-flight git operations can be surfaced per
    /// path.
    ///
    /// When `f` resolves, each recorded op is marked `Finished` on success,
    /// `Skipped` if the job was canceled, or `Error` on failure; the original
    /// error (if any) is propagated through the returned task.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                // Cancellation is not an error from the caller's perspective.
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                // Re-insert each tracked op with its final job status.
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6984
    /// Records a new `Running` pending op (with the given `git_status`) for
    /// each path, returning the `(id, path)` pairs so the caller can later
    /// mark each op's final status. Op ids are unique per path, not globally.
    fn new_pending_ops_for_paths(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
    ) -> Vec<(PendingOpId, RepoPath)> {
        let mut edits = Vec::with_capacity(paths.len());
        let mut ids = Vec::with_capacity(paths.len());
        for path in paths {
            // Append to the path's existing op list, or start a new one.
            let mut ops = self
                .pending_ops
                .get(&PathKey(path.as_ref().clone()), ())
                .cloned()
                .unwrap_or_else(|| PendingOps::new(&path));
            let id = ops.max_id() + 1;
            ops.ops.push(PendingOp {
                id,
                git_status,
                job_status: pending_op::JobStatus::Running,
            });
            edits.push(sum_tree::Edit::Insert(ops));
            ids.push((id, path));
        }
        self.pending_ops.edit(edits, ());
        ids
    }
7010 pub fn default_remote_url(&self) -> Option<String> {
7011 self.remote_upstream_url
7012 .clone()
7013 .or(self.remote_origin_url.clone())
7014 }
7015}
7016
7017/// If `path` is a git linked worktree checkout, resolves it to the main
7018/// repository's working directory path. Returns `None` if `path` is a normal
7019/// repository, not a git repo, or if resolution fails.
7020///
7021/// Resolution works by:
7022/// 1. Reading the `.git` file to get the `gitdir:` pointer
7023/// 2. Following that to the worktree-specific git directory
7024/// 3. Reading the `commondir` file to find the shared `.git` directory
7025/// 4. Deriving the main repo's working directory from the common dir
7026pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7027 let dot_git = path.join(".git");
7028 let metadata = fs.metadata(&dot_git).await.ok()??;
7029 if metadata.is_dir {
7030 return None; // Normal repo, not a linked worktree
7031 }
7032 // It's a .git file — parse the gitdir: pointer
7033 let content = fs.load(&dot_git).await.ok()?;
7034 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7035 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7036 // Read commondir to find the main .git directory
7037 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7038 let common_dir = fs
7039 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7040 .await
7041 .ok()?;
7042 Some(git::repository::original_repo_path_from_common_dir(
7043 &common_dir,
7044 ))
7045}
7046
/// Validates that the resolved worktree directory is acceptable:
/// - The setting must not be an absolute path.
/// - The resolved path must be either a subdirectory of the working
///   directory or a subdirectory of its parent (i.e., a sibling).
///
/// Returns `Ok(resolved_path)` or an error with a user-facing message.
pub fn worktrees_directory_for_repo(
    original_repo_abs_path: &Path,
    worktree_directory_setting: &str,
) -> Result<PathBuf> {
    // Check the original setting before trimming, since a path like "///"
    // is absolute but becomes "" after stripping trailing separators.
    // Also check for leading `/` or `\` explicitly, because on Windows
    // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
    // would slip through even though it's clearly not a relative path.
    if Path::new(worktree_directory_setting).is_absolute()
        || worktree_directory_setting.starts_with('/')
        || worktree_directory_setting.starts_with('\\')
    {
        anyhow::bail!(
            "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
        );
    }

    if worktree_directory_setting.is_empty() {
        anyhow::bail!("git.worktree_directory must not be empty");
    }

    // Trailing separators are insignificant; drop them before comparing.
    let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
    if trimmed == ".." {
        anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
    }

    let joined = original_repo_abs_path.join(trimmed);
    let resolved = util::normalize_path(&joined);
    // If the setting escapes the repo directory (e.g. "../worktrees"), append
    // the repo's own directory name — presumably so worktrees of different
    // repos sharing the same parent setting don't collide.
    let resolved = if resolved.starts_with(original_repo_abs_path) {
        resolved
    } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
        resolved.join(repo_dir_name)
    } else {
        resolved
    };

    let parent = original_repo_abs_path
        .parent()
        .unwrap_or(original_repo_abs_path);

    // Final containment check: the result must live inside the repo or be a
    // sibling of it (i.e. inside the repo's parent directory).
    if !resolved.starts_with(parent) {
        anyhow::bail!(
            "git.worktree_directory resolved to {resolved:?}, which is outside \
            the project root and its parent directory. It must resolve to a \
            subdirectory of {original_repo_abs_path:?} or a sibling of it."
        );
    }

    Ok(resolved)
}
7104
7105/// Returns a short name for a linked worktree suitable for UI display
7106///
7107/// Uses the main worktree path to come up with a short name that disambiguates
7108/// the linked worktree from the main worktree.
7109pub fn linked_worktree_short_name(
7110 main_worktree_path: &Path,
7111 linked_worktree_path: &Path,
7112) -> Option<SharedString> {
7113 if main_worktree_path == linked_worktree_path {
7114 return None;
7115 }
7116
7117 let project_name = main_worktree_path.file_name()?.to_str()?;
7118 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7119 let name = if directory_name != project_name {
7120 directory_name.to_string()
7121 } else {
7122 linked_worktree_path
7123 .parent()?
7124 .file_name()?
7125 .to_str()?
7126 .to_string()
7127 };
7128 Some(name.into())
7129}
7130
/// Builds a permalink into the upstream repository for a file that lives in
/// the local Cargo registry source cache (`~/.cargo/registry/src/...`).
///
/// Published crates ship a `.cargo_vcs_info.json` containing the git SHA and
/// the crate's path within its repository; combined with the
/// `package.repository` field of `Cargo.toml`, this is enough to reconstruct
/// a permalink to the selected line range.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal deserialization shapes for the two files we read; all other
    // fields are ignored.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to find the crate root containing the VCS info.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file's path at the crate's location within the repository.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7181
7182fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7183 let Some(blame) = blame else {
7184 return proto::BlameBufferResponse {
7185 blame_response: None,
7186 };
7187 };
7188
7189 let entries = blame
7190 .entries
7191 .into_iter()
7192 .map(|entry| proto::BlameEntry {
7193 sha: entry.sha.as_bytes().into(),
7194 start_line: entry.range.start,
7195 end_line: entry.range.end,
7196 original_line_number: entry.original_line_number,
7197 author: entry.author,
7198 author_mail: entry.author_mail,
7199 author_time: entry.author_time,
7200 author_tz: entry.author_tz,
7201 committer: entry.committer_name,
7202 committer_mail: entry.committer_email,
7203 committer_time: entry.committer_time,
7204 committer_tz: entry.committer_tz,
7205 summary: entry.summary,
7206 previous: entry.previous,
7207 filename: entry.filename,
7208 })
7209 .collect::<Vec<_>>();
7210
7211 let messages = blame
7212 .messages
7213 .into_iter()
7214 .map(|(oid, message)| proto::CommitMessage {
7215 oid: oid.as_bytes().into(),
7216 message,
7217 })
7218 .collect::<Vec<_>>();
7219
7220 proto::BlameBufferResponse {
7221 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7222 }
7223}
7224
7225fn deserialize_blame_buffer_response(
7226 response: proto::BlameBufferResponse,
7227) -> Option<git::blame::Blame> {
7228 let response = response.blame_response?;
7229 let entries = response
7230 .entries
7231 .into_iter()
7232 .filter_map(|entry| {
7233 Some(git::blame::BlameEntry {
7234 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7235 range: entry.start_line..entry.end_line,
7236 original_line_number: entry.original_line_number,
7237 committer_name: entry.committer,
7238 committer_time: entry.committer_time,
7239 committer_tz: entry.committer_tz,
7240 committer_email: entry.committer_mail,
7241 author: entry.author,
7242 author_mail: entry.author_mail,
7243 author_time: entry.author_time,
7244 author_tz: entry.author_tz,
7245 summary: entry.summary,
7246 previous: entry.previous,
7247 filename: entry.filename,
7248 })
7249 })
7250 .collect::<Vec<_>>();
7251
7252 let messages = response
7253 .messages
7254 .into_iter()
7255 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7256 .collect::<HashMap<_, _>>();
7257
7258 Some(Blame { entries, messages })
7259}
7260
7261fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7262 proto::Branch {
7263 is_head: branch.is_head,
7264 ref_name: branch.ref_name.to_string(),
7265 unix_timestamp: branch
7266 .most_recent_commit
7267 .as_ref()
7268 .map(|commit| commit.commit_timestamp as u64),
7269 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7270 ref_name: upstream.ref_name.to_string(),
7271 tracking: upstream
7272 .tracking
7273 .status()
7274 .map(|upstream| proto::UpstreamTracking {
7275 ahead: upstream.ahead as u64,
7276 behind: upstream.behind as u64,
7277 }),
7278 }),
7279 most_recent_commit: branch
7280 .most_recent_commit
7281 .as_ref()
7282 .map(|commit| proto::CommitSummary {
7283 sha: commit.sha.to_string(),
7284 subject: commit.subject.to_string(),
7285 commit_timestamp: commit.commit_timestamp,
7286 author_name: commit.author_name.to_string(),
7287 }),
7288 }
7289}
7290
7291fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7292 proto::Worktree {
7293 path: worktree.path.to_string_lossy().to_string(),
7294 ref_name: worktree
7295 .ref_name
7296 .as_ref()
7297 .map(|s| s.to_string())
7298 .unwrap_or_default(),
7299 sha: worktree.sha.to_string(),
7300 is_main: worktree.is_main,
7301 }
7302}
7303
7304fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7305 git::repository::Worktree {
7306 path: PathBuf::from(proto.path.clone()),
7307 ref_name: Some(SharedString::from(&proto.ref_name)),
7308 sha: proto.sha.clone().into(),
7309 is_main: proto.is_main,
7310 }
7311}
7312
7313fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7314 git::repository::Branch {
7315 is_head: proto.is_head,
7316 ref_name: proto.ref_name.clone().into(),
7317 upstream: proto
7318 .upstream
7319 .as_ref()
7320 .map(|upstream| git::repository::Upstream {
7321 ref_name: upstream.ref_name.to_string().into(),
7322 tracking: upstream
7323 .tracking
7324 .as_ref()
7325 .map(|tracking| {
7326 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7327 ahead: tracking.ahead as u32,
7328 behind: tracking.behind as u32,
7329 })
7330 })
7331 .unwrap_or(git::repository::UpstreamTracking::Gone),
7332 }),
7333 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7334 git::repository::CommitSummary {
7335 sha: commit.sha.to_string().into(),
7336 subject: commit.subject.to_string().into(),
7337 commit_timestamp: commit.commit_timestamp,
7338 author_name: commit.author_name.to_string().into(),
7339 has_parent: true,
7340 }
7341 }),
7342 }
7343}
7344
/// Converts in-memory [`CommitDetails`] into the protobuf representation.
fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}
7354
/// Converts protobuf commit details back into the in-memory representation.
fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}
7364
/// Recomputes the full repository snapshot from the git backend.
///
/// This computes the repository state on the foreground thread while running
/// the git commands on the background thread. Branch, head, remotes, and
/// worktrees are updated first so the UI can react sooner; file statuses,
/// diff stats, stash, and merge state follow in a second update immediately
/// after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        // A full scan supersedes any queued incremental status refreshes.
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD first; a repository with no commits yet has no head.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Keep only linked (secondary) worktrees; the main one is excluded.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First update: publish branch/head/remote/worktree state immediately.
    let snapshot = this.update(cx, |this, cx| {
        let branch_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if branch_changed {
            cx.emit(RepositoryEvent::BranchChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                // Diff stats require a HEAD commit; on an unborn branch,
                // substitute an empty result instead.
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Join statuses with their diff stats, collecting conflicted paths for
    // the merge-state update below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Second update: publish file statuses, stash, and merge state.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7527
7528fn status_from_proto(
7529 simple_status: i32,
7530 status: Option<proto::GitFileStatus>,
7531) -> anyhow::Result<FileStatus> {
7532 use proto::git_file_status::Variant;
7533
7534 let Some(variant) = status.and_then(|status| status.variant) else {
7535 let code = proto::GitStatus::from_i32(simple_status)
7536 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7537 let result = match code {
7538 proto::GitStatus::Added => TrackedStatus {
7539 worktree_status: StatusCode::Added,
7540 index_status: StatusCode::Unmodified,
7541 }
7542 .into(),
7543 proto::GitStatus::Modified => TrackedStatus {
7544 worktree_status: StatusCode::Modified,
7545 index_status: StatusCode::Unmodified,
7546 }
7547 .into(),
7548 proto::GitStatus::Conflict => UnmergedStatus {
7549 first_head: UnmergedStatusCode::Updated,
7550 second_head: UnmergedStatusCode::Updated,
7551 }
7552 .into(),
7553 proto::GitStatus::Deleted => TrackedStatus {
7554 worktree_status: StatusCode::Deleted,
7555 index_status: StatusCode::Unmodified,
7556 }
7557 .into(),
7558 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7559 };
7560 return Ok(result);
7561 };
7562
7563 let result = match variant {
7564 Variant::Untracked(_) => FileStatus::Untracked,
7565 Variant::Ignored(_) => FileStatus::Ignored,
7566 Variant::Unmerged(unmerged) => {
7567 let [first_head, second_head] =
7568 [unmerged.first_head, unmerged.second_head].map(|head| {
7569 let code = proto::GitStatus::from_i32(head)
7570 .with_context(|| format!("Invalid git status code: {head}"))?;
7571 let result = match code {
7572 proto::GitStatus::Added => UnmergedStatusCode::Added,
7573 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7574 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7575 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7576 };
7577 Ok(result)
7578 });
7579 let [first_head, second_head] = [first_head?, second_head?];
7580 UnmergedStatus {
7581 first_head,
7582 second_head,
7583 }
7584 .into()
7585 }
7586 Variant::Tracked(tracked) => {
7587 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7588 .map(|status| {
7589 let code = proto::GitStatus::from_i32(status)
7590 .with_context(|| format!("Invalid git status code: {status}"))?;
7591 let result = match code {
7592 proto::GitStatus::Modified => StatusCode::Modified,
7593 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7594 proto::GitStatus::Added => StatusCode::Added,
7595 proto::GitStatus::Deleted => StatusCode::Deleted,
7596 proto::GitStatus::Renamed => StatusCode::Renamed,
7597 proto::GitStatus::Copied => StatusCode::Copied,
7598 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7599 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7600 };
7601 Ok(result)
7602 });
7603 let [index_status, worktree_status] = [index_status?, worktree_status?];
7604 TrackedStatus {
7605 index_status,
7606 worktree_status,
7607 }
7608 .into()
7609 }
7610 };
7611 Ok(result)
7612}
7613
7614fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7615 use proto::git_file_status::{Tracked, Unmerged, Variant};
7616
7617 let variant = match status {
7618 FileStatus::Untracked => Variant::Untracked(Default::default()),
7619 FileStatus::Ignored => Variant::Ignored(Default::default()),
7620 FileStatus::Unmerged(UnmergedStatus {
7621 first_head,
7622 second_head,
7623 }) => Variant::Unmerged(Unmerged {
7624 first_head: unmerged_status_to_proto(first_head),
7625 second_head: unmerged_status_to_proto(second_head),
7626 }),
7627 FileStatus::Tracked(TrackedStatus {
7628 index_status,
7629 worktree_status,
7630 }) => Variant::Tracked(Tracked {
7631 index_status: tracked_status_to_proto(index_status),
7632 worktree_status: tracked_status_to_proto(worktree_status),
7633 }),
7634 };
7635 proto::GitFileStatus {
7636 variant: Some(variant),
7637 }
7638}
7639
7640fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7641 match code {
7642 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7643 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7644 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7645 }
7646}
7647
7648fn tracked_status_to_proto(code: StatusCode) -> i32 {
7649 match code {
7650 StatusCode::Added => proto::GitStatus::Added as _,
7651 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7652 StatusCode::Modified => proto::GitStatus::Modified as _,
7653 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7654 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7655 StatusCode::Copied => proto::GitStatus::Copied as _,
7656 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7657 }
7658}