1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for git state across a project: tracks all discovered
/// repositories, per-buffer diff state, and local/remote collaboration state.
pub struct GitStore {
    // Whether this store operates on local repositories or mirrors a remote
    // (upstream) project, plus any downstream share.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    // All known repositories, keyed by their store-assigned id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // The worktrees that each repository spans.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    // The repository currently considered active (see `set_active_repo_for_path`).
    active_repo_id: Option<RepositoryId>,
    // In-flight diff loads; `Shared` so concurrent requests await one task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state: diffs, conflict sets, and cached base texts.
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diffs that have been shared with each remote peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// The diffs for one buffer that have been shared with a remote peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Git state attached to a single open buffer: its diffs, conflict-marker
/// tracking, and cached base texts read from the repository.
struct BufferGitState {
    // Weak handles: the diffs are owned by their consumers, and dropping the
    // last strong reference elsewhere releases them.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against an arbitrary commit; a `None` key corresponds to a diff
    // with no base content (see `open_diff_since`).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    // In-flight background work; replaced (and thereby cancelled) on restart.
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders resolved when the next conflict-marker parse finishes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    // Cached base texts most recently read from the repository.
    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed when diffs are recalculated.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed, carrying the
/// new contents. `None` means the base has no text — presumably a file absent
/// from the index/commit; confirm against the repository loaders.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}

/// Identifies which kind of diff is being loaded or cached for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer contents vs. the index.
    Unstaged,
    /// Buffer contents vs. HEAD.
    Uncommitted,
    /// Buffer contents vs. an arbitrary commit (`None` = no base commit).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store owns repositories on the local machine or mirrors a
/// remote (upstream) project's store.
enum GitStoreState {
    Local {
        // Source of fresh `RepositoryId`s for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        // Present while this local project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        // Present while this remote project is re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A repository change to forward to downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Share state for a local project: snapshots queued on `updates_tx` are
/// diffed against the last-sent state and forwarded to `client` by `_task`
/// (see `GitStore::shared`).
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    // Keeps the forwarding loop alive for the lifetime of the share.
    _task: Task<Result<()>>,
}

/// A checkpoint of every repository in the store, keyed by each repository's
/// working-directory absolute path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// The git status of a single path within a repository, with optional
/// added/deleted line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    // Summarize by path (for ordering within the tree) plus the aggregated
    // git status summary.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    // Entries are keyed by their repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Store-assigned identifier for a repository.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// Details of an in-progress merge, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    // For each conflicted path, the merge heads involved; `None` entries
    // presumably denote heads without a resolvable name — TODO confirm.
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    // The merge message, when one exists.
    pub message: Option<SharedString>,
}

/// Commit data for the git graph, which may still be loading.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// An immutable snapshot of a repository's observable state: statuses,
/// branch/HEAD, merge details, remotes, stash, and linked git worktrees.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    // Per-path git statuses, ordered by path for efficient range queries.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    // Incremented on each status scan; used to order snapshot updates.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    // Git worktrees linked to this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
298
/// Identifier for jobs submitted to a repository's git worker.
type JobId = u64;

/// The result of attempting to cancel a git job.
#[derive(Copy, Clone)]
pub enum CancelOutcome {
    /// The job was still queued and was removed before it started.
    RemovedFromQueue,
    /// The job was running; the process was killed. Cleanup callback has been awaited.
    KilledRunning,
    /// The job had already finished before the cancel arrived.
    AlreadyFinished,
}

/// Handle that can cancel one specific queued or running git job.
pub struct CancellationToken {
    id: JobId,
    // Channel to the worker that owns the job.
    job_sender: mpsc::UnboundedSender<GitWorkerMessage>,
}
315
316impl CancellationToken {
317 pub fn cancel(
318 self,
319 cleanup: impl FnOnce(RepositoryState, &mut AsyncApp) -> Task<()> + 'static,
320 ) -> oneshot::Receiver<CancelOutcome> {
321 let (result_tx, result_rx) = oneshot::channel();
322 self.job_sender
323 .unbounded_send(GitWorkerMessage::Cancel {
324 id: self.id,
325 cleanup: Some(Box::new(cleanup)),
326 result_tx,
327 })
328 .ok();
329 result_rx
330 }
331}
332
/// Messages processed by a repository's git worker.
enum GitWorkerMessage {
    /// Run the given job.
    Job(GitJob),
    /// Cancel the job with the given id; `result_tx` receives the outcome.
    Cancel {
        id: JobId,
        cleanup: Option<Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>>,
        result_tx: oneshot::Sender<CancelOutcome>,
    },
}

/// Metadata about a git job that is currently active.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    // When the job started.
    pub start: Instant,
    // Human-readable description of the job.
    pub message: SharedString,
}
347
/// Background handler that loads per-commit data for the git graph on demand.
struct GraphCommitDataHandler {
    // Keeps the handler's background loop alive.
    _task: Task<()>,
    // Send a commit OID here to request its data.
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph commit-data handler.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Initial commit data loaded for the git graph for one (source, order) pair.
pub struct InitialGitGraphData {
    // Keeps the fetch running; dropping this cancels the load.
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    // Index into `commit_data` for each commit OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of graph data handed back to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
371
/// A single git repository tracked by the [`GitStore`]: its latest snapshot
/// plus the job queue and caches used to operate on it.
pub struct Repository {
    this: WeakEntity<Self>,
    // The most recent observable state; `Deref` exposes it directly.
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Channel to this repository's git worker.
    job_sender: mpsc::UnboundedSender<GitWorkerMessage>,
    // Jobs currently queued or running, keyed by id.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Last issued job id; used to assign ids to new jobs.
    job_id: JobId,
    // Askpass delegates by id, for answering credential prompts.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily resolved local/remote backing state, shared across jobs.
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    // Git-graph data per (source, order) combination.
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
391
// Expose the snapshot's fields and methods directly on `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
399
/// State needed to operate on a repository that lives on this machine.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    // The underlying git backend opened via `Fs::open_repo`.
    pub backend: Arc<dyn GitRepository>,
    // Shell environment resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
406
407impl LocalRepositoryState {
408 async fn new(
409 work_directory_abs_path: Arc<Path>,
410 dot_git_abs_path: Arc<Path>,
411 project_environment: WeakEntity<ProjectEnvironment>,
412 fs: Arc<dyn Fs>,
413 is_trusted: bool,
414 cx: &mut AsyncApp,
415 ) -> anyhow::Result<Self> {
416 let environment = project_environment
417 .update(cx, |project_environment, cx| {
418 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
419 })?
420 .await
421 .unwrap_or_else(|| {
422 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
423 HashMap::default()
424 });
425 let search_paths = environment.get("PATH").map(|val| val.to_owned());
426 let backend = cx
427 .background_spawn({
428 let fs = fs.clone();
429 async move {
430 let system_git_binary_path = search_paths
431 .and_then(|search_paths| {
432 which::which_in("git", Some(search_paths), &work_directory_abs_path)
433 .ok()
434 })
435 .or_else(|| which::which("git").ok());
436 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
437 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
438 }
439 })
440 .await?;
441 backend.set_trusted(is_trusted);
442 Ok(LocalRepositoryState {
443 backend,
444 environment: Arc::new(environment),
445 fs,
446 })
447 }
448}
449
/// State needed to proxy repository operations to a remote (upstream) project.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// Where repository operations execute: on this machine or via a remote peer.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events for loading the git graph.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of loaded commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}

/// Events emitted by a [`Repository`] when part of its state changes.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// Git-graph progress for the given (source, order) combination.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
478
/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the [`GitStore`] itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// Writing the index failed for a staging operation.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of work executed by a repository's worker against its
/// [`RepositoryState`].
pub struct GitJob {
    id: JobId,
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // When set, identifies the class of job — presumably so equivalent queued
    // jobs can be coalesced; confirm against the worker loop.
    key: Option<GitJobKey>,
}

/// Classes of git jobs, used as [`GitJob`] keys.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
511
512impl GitStore {
513 pub fn local(
514 worktree_store: &Entity<WorktreeStore>,
515 buffer_store: Entity<BufferStore>,
516 environment: Entity<ProjectEnvironment>,
517 fs: Arc<dyn Fs>,
518 cx: &mut Context<Self>,
519 ) -> Self {
520 Self::new(
521 worktree_store.clone(),
522 buffer_store,
523 GitStoreState::Local {
524 next_repository_id: Arc::new(AtomicU64::new(1)),
525 downstream: None,
526 project_environment: environment,
527 fs,
528 },
529 cx,
530 )
531 }
532
533 pub fn remote(
534 worktree_store: &Entity<WorktreeStore>,
535 buffer_store: Entity<BufferStore>,
536 upstream_client: AnyProtoClient,
537 project_id: u64,
538 cx: &mut Context<Self>,
539 ) -> Self {
540 Self::new(
541 worktree_store.clone(),
542 buffer_store,
543 GitStoreState::Remote {
544 upstream_client,
545 upstream_project_id: project_id,
546 downstream: None,
547 },
548 cx,
549 )
550 }
551
552 fn new(
553 worktree_store: Entity<WorktreeStore>,
554 buffer_store: Entity<BufferStore>,
555 state: GitStoreState,
556 cx: &mut Context<Self>,
557 ) -> Self {
558 let mut _subscriptions = vec![
559 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
560 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
561 ];
562
563 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
564 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
565 }
566
567 GitStore {
568 state,
569 buffer_store,
570 worktree_store,
571 repositories: HashMap::default(),
572 worktree_ids: HashMap::default(),
573 active_repo_id: None,
574 _subscriptions,
575 loading_diffs: HashMap::default(),
576 shared_diffs: HashMap::default(),
577 diffs: HashMap::default(),
578 }
579 }
580
    /// Registers all git-related RPC handlers on the given client. Called once
    /// at startup before any requests can arrive.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
628
629 pub fn is_local(&self) -> bool {
630 matches!(self.state, GitStoreState::Local { .. })
631 }
632 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
633 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
634 let id = repo.read(cx).id;
635 if self.active_repo_id != Some(id) {
636 self.active_repo_id = Some(id);
637 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
638 }
639 }
640 }
641
    /// Begins sharing this store's repositories with downstream collaborators.
    ///
    /// For a remote store, the current snapshots are sent immediately and the
    /// client is recorded. For a local store, a background task is spawned
    /// that diffs each queued snapshot against the last one sent and forwards
    /// incremental updates; the task clears the downstream state when the
    /// update channel closes or a send fails.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send a full initial update for every repository, split into
                // size-limited messages.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // `snapshots` holds the last state sent per repository, so
                // later updates can be expressed as diffs.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the queue with the current snapshot of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Seen before: send only the diff
                                            // against the last sent snapshot.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // First time: send a full update.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The forwarding loop ended (channel closed or send
                        // error): drop the downstream share.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
722
723 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
724 match &mut self.state {
725 GitStoreState::Local {
726 downstream: downstream_client,
727 ..
728 } => {
729 downstream_client.take();
730 }
731 GitStoreState::Remote {
732 downstream: downstream_client,
733 ..
734 } => {
735 downstream_client.take();
736 }
737 }
738 self.shared_diffs.clear();
739 }
740
741 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
742 self.shared_diffs.remove(peer_id);
743 }
744
745 pub fn active_repository(&self) -> Option<Entity<Repository>> {
746 self.active_repo_id
747 .as_ref()
748 .map(|id| self.repositories[id].clone())
749 }
750
    /// Returns (opening if necessary) the diff of `buffer`'s contents against
    /// the index (staged) text.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: a live unstaged diff already exists for this buffer.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // If a recalculation is in flight, resolve only once it finishes.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads: every caller awaits the same shared
        // task, which is removed from `loading_diffs` when it completes.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
805
    /// Returns (opening if necessary) the diff of `buffer`'s contents against
    /// the blob at commit `oid` in `repo`. A `None` oid produces a diff with
    /// no base content.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff for this oid already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            // If a recalculation is in flight, resolve only once it finishes.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Join an in-flight load for the same (buffer, oid) if there is one.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository, unless no base
                    // commit was requested.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff acts as this diff's secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Register the finished diff and cache its base text.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        // Record the shared task so concurrent callers can join it.
        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
906
    /// Returns (opening if necessary) the diff of `buffer`'s contents against
    /// the committed (HEAD) text.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live uncommitted diff already exists for this buffer.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // If a recalculation is in flight, resolve only once it finishes.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads: every caller awaits the same shared
        // task, which is removed from `loading_diffs` when it completes.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
959
    /// Shared tail of `open_unstaged_diff`/`open_uncommitted_diff`: given the
    /// loaded base texts, builds the diff entity, registers it in the buffer's
    /// git state, and waits for the initial recalculation before resolving.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Loading the base text failed: clear the in-flight marker so
                // a later call can retry, then propagate the error.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Apply the new base texts and kick off a recalculation; the
                // returned future resolves once that recalculation is done.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1034
1035 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1036 let diff_state = self.diffs.get(&buffer_id)?;
1037 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1038 }
1039
1040 pub fn get_uncommitted_diff(
1041 &self,
1042 buffer_id: BufferId,
1043 cx: &App,
1044 ) -> Option<Entity<BufferDiff>> {
1045 let diff_state = self.diffs.get(&buffer_id)?;
1046 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1047 }
1048
1049 pub fn get_diff_since_oid(
1050 &self,
1051 buffer_id: BufferId,
1052 oid: Option<git::Oid>,
1053 cx: &App,
1054 ) -> Option<Entity<BufferDiff>> {
1055 let diff_state = self.diffs.get(&buffer_id)?;
1056 diff_state.read(cx).oid_diff(oid)
1057 }
1058
1059 pub fn open_conflict_set(
1060 &mut self,
1061 buffer: Entity<Buffer>,
1062 cx: &mut Context<Self>,
1063 ) -> Entity<ConflictSet> {
1064 log::debug!("open conflict set");
1065 let buffer_id = buffer.read(cx).remote_id();
1066
1067 if let Some(git_state) = self.diffs.get(&buffer_id)
1068 && let Some(conflict_set) = git_state
1069 .read(cx)
1070 .conflict_set
1071 .as_ref()
1072 .and_then(|weak| weak.upgrade())
1073 {
1074 let conflict_set = conflict_set;
1075 let buffer_snapshot = buffer.read(cx).text_snapshot();
1076
1077 git_state.update(cx, |state, cx| {
1078 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1079 });
1080
1081 return conflict_set;
1082 }
1083
1084 let is_unmerged = self
1085 .repository_and_path_for_buffer_id(buffer_id, cx)
1086 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1087 let git_store = cx.weak_entity();
1088 let buffer_git_state = self
1089 .diffs
1090 .entry(buffer_id)
1091 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1092 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1093
1094 self._subscriptions
1095 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1096 cx.emit(GitStoreEvent::ConflictsUpdated);
1097 }));
1098
1099 buffer_git_state.update(cx, |state, cx| {
1100 state.conflict_set = Some(conflict_set.downgrade());
1101 let buffer_snapshot = buffer.read(cx).text_snapshot();
1102 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1103 });
1104
1105 conflict_set
1106 }
1107
1108 pub fn project_path_git_status(
1109 &self,
1110 project_path: &ProjectPath,
1111 cx: &App,
1112 ) -> Option<FileStatus> {
1113 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1114 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1115 }
1116
    /// Captures a checkpoint of every repository in the store.
    ///
    /// Returns a [`GitStoreCheckpoint`] mapping each repository's work
    /// directory path to its checkpoint; if any repository's checkpoint
    /// fails, the whole task fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `map(|c| c?)` flattens the nested Result produced when the
                // checkpoint job's channel is awaited.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Paths and checkpoints were pushed in lockstep above, so
                // zipping re-associates each checkpoint with its repository.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1137
1138 pub fn restore_checkpoint(
1139 &self,
1140 checkpoint: GitStoreCheckpoint,
1141 cx: &mut App,
1142 ) -> Task<Result<()>> {
1143 let repositories_by_work_dir_abs_path = self
1144 .repositories
1145 .values()
1146 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1147 .collect::<HashMap<_, _>>();
1148
1149 let mut tasks = Vec::new();
1150 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1151 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1152 let restore = repository.update(cx, |repository, _| {
1153 repository.restore_checkpoint(checkpoint)
1154 });
1155 tasks.push(async move { restore.await? });
1156 }
1157 }
1158 cx.background_spawn(async move {
1159 future::try_join_all(tasks).await?;
1160 Ok(())
1161 })
1162 }
1163
1164 /// Compares two checkpoints, returning true if they are equal.
1165 pub fn compare_checkpoints(
1166 &self,
1167 left: GitStoreCheckpoint,
1168 mut right: GitStoreCheckpoint,
1169 cx: &mut App,
1170 ) -> Task<Result<bool>> {
1171 let repositories_by_work_dir_abs_path = self
1172 .repositories
1173 .values()
1174 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1175 .collect::<HashMap<_, _>>();
1176
1177 let mut tasks = Vec::new();
1178 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1179 if let Some(right_checkpoint) = right
1180 .checkpoints_by_work_dir_abs_path
1181 .remove(&work_dir_abs_path)
1182 {
1183 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1184 {
1185 let compare = repository.update(cx, |repository, _| {
1186 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1187 });
1188
1189 tasks.push(async move { compare.await? });
1190 }
1191 } else {
1192 return Task::ready(Ok(false));
1193 }
1194 }
1195 cx.background_spawn(async move {
1196 Ok(future::try_join_all(tasks)
1197 .await?
1198 .into_iter()
1199 .all(|result| result))
1200 })
1201 }
1202
    /// Blames a buffer.
    ///
    /// When `version` is `None`, the buffer's current contents and version are
    /// used; otherwise the text at that version is blamed. Local repositories
    /// run blame through the git backend; remote ones forward a `BlameBuffer`
    /// request upstream.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold the repository weakly so the spawned task doesn't keep it alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1249
1250 pub fn file_history(
1251 &self,
1252 repo: &Entity<Repository>,
1253 path: RepoPath,
1254 cx: &mut App,
1255 ) -> Task<Result<git::repository::FileHistory>> {
1256 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1257
1258 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1259 }
1260
1261 pub fn file_history_paginated(
1262 &self,
1263 repo: &Entity<Repository>,
1264 path: RepoPath,
1265 skip: usize,
1266 limit: Option<usize>,
1267 cx: &mut App,
1268 ) -> Task<Result<git::repository::FileHistory>> {
1269 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1270
1271 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1272 }
1273
    /// Builds a permalink URL to the given line range of a buffer on its git
    /// hosting provider.
    ///
    /// If the buffer is not in a git repository, falls back to constructing a
    /// permalink from Cargo registry metadata for Rust sources; otherwise
    /// returns an error. Local repositories resolve the remote URL and HEAD
    /// SHA via the git backend; remote projects forward the request upstream.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1358
1359 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1360 match &self.state {
1361 GitStoreState::Local {
1362 downstream: downstream_client,
1363 ..
1364 } => downstream_client
1365 .as_ref()
1366 .map(|state| (state.client.clone(), state.project_id)),
1367 GitStoreState::Remote {
1368 downstream: downstream_client,
1369 ..
1370 } => downstream_client.clone(),
1371 }
1372 }
1373
1374 fn upstream_client(&self) -> Option<AnyProtoClient> {
1375 match &self.state {
1376 GitStoreState::Local { .. } => None,
1377 GitStoreState::Remote {
1378 upstream_client, ..
1379 } => Some(upstream_client.clone()),
1380 }
1381 }
1382
    /// Keeps the set of repositories in sync with worktree-store events.
    ///
    /// Only relevant for local stores; remote stores learn about repositories
    /// through proto updates instead.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by owning repository, then
                    // notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Drop any repository whose last associated worktree just went
                // away.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream collaborators about the removal too.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, pick an arbitrary
                // remaining one (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Responds to a repository update by refreshing the conflict status of
    /// every open buffer belonging to that repository, then re-emitting the
    /// event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only touch buffers that actually live in the repository that
            // emitted this event.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Reparse markers only when the conflict flag actually
                        // flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1528
    /// Re-emits a repository's `JobsUpdated` notification as a store event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1532
    /// Updates our list of repositories and schedules git scans in response to
    /// a notification from a worktree.
    ///
    /// Repositories whose work directory moved are rescanned in place; ones
    /// whose work directory disappeared from all worktrees are removed; brand
    /// new work directories get a freshly-constructed local [`Repository`].
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update against an existing repository by either its
            // old or its new work directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Still present (possibly moved): record the association
                    // with this worktree and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Gone from this worktree; remove the repository entirely
                    // once no worktree references it anymore.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(_repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A repository we haven't seen before: construct a local
                // Repository entity and kick off its first scan.
                let original_repo_abs_path: Arc<Path> =
                    git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1641
1642 fn on_trusted_worktrees_event(
1643 &mut self,
1644 _: Entity<TrustedWorktreesStore>,
1645 event: &TrustedWorktreesEvent,
1646 cx: &mut Context<Self>,
1647 ) {
1648 if !matches!(self.state, GitStoreState::Local { .. }) {
1649 return;
1650 }
1651
1652 let (is_trusted, event_paths) = match event {
1653 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1654 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1655 };
1656
1657 for (repo_id, worktree_ids) in &self.worktree_ids {
1658 if worktree_ids
1659 .iter()
1660 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1661 {
1662 if let Some(repo) = self.repositories.get(repo_id) {
1663 let repository_state = repo.read(cx).repository_state.clone();
1664 cx.background_spawn(async move {
1665 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1666 state.backend.set_trusted(is_trusted);
1667 }
1668 })
1669 .detach();
1670 }
1671 }
1672 }
1673 }
1674
    /// Keeps per-buffer git state in sync with buffer-store events: language
    /// changes, shared-buffer closure, buffer drops, and file-path changes.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Track language changes so diffs can be re-highlighted.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Discard all diff state for dropped buffers.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Reload the committed text for the new path, then rebase
                    // the buffer's diffs onto it; errors are only logged.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1747
    /// Recomputes diffs and reparses conflict markers for the given buffers.
    ///
    /// Returns a future that resolves once every triggered recalculation and
    /// conflict-marker reparse has completed. Buffers without existing diff
    /// state are skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                // Kick off the diff recalculation and collect its completion
                // signal, if one was produced.
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                // Also reparse conflict markers against the same snapshot.
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1773
    /// Writes hunk staging/unstaging changes to the git index.
    ///
    /// On `HunksStagedOrUnstaged`, bumps the buffer's hunk-staging operation
    /// counter, then spawns a job to set the index text. If the write fails,
    /// the pending hunks are cleared and an `IndexWriteError` is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // The counter lets later consumers detect stale index writes.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Only the error case needs handling; success leaves
                        // the pending hunks to be confirmed by a later scan.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1813
1814 fn local_worktree_git_repos_changed(
1815 &mut self,
1816 worktree: Entity<Worktree>,
1817 changed_repos: &UpdatedGitRepositoriesSet,
1818 cx: &mut Context<Self>,
1819 ) {
1820 log::debug!("local worktree repos changed");
1821 debug_assert!(worktree.read(cx).is_local());
1822
1823 for repository in self.repositories.values() {
1824 repository.update(cx, |repository, cx| {
1825 let repo_abs_path = &repository.work_directory_abs_path;
1826 if changed_repos.iter().any(|update| {
1827 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1828 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1829 }) {
1830 repository.reload_buffer_diff_bases(cx);
1831 }
1832 });
1833 }
1834 }
1835
    /// Returns all repositories known to this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1839
1840 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1841 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1842 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1843 Some(status.status)
1844 }
1845
1846 pub fn repository_and_path_for_buffer_id(
1847 &self,
1848 buffer_id: BufferId,
1849 cx: &App,
1850 ) -> Option<(Entity<Repository>, RepoPath)> {
1851 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1852 let project_path = buffer.read(cx).project_path(cx)?;
1853 self.repository_and_path_for_project_path(&project_path, cx)
1854 }
1855
1856 pub fn repository_and_path_for_project_path(
1857 &self,
1858 path: &ProjectPath,
1859 cx: &App,
1860 ) -> Option<(Entity<Repository>, RepoPath)> {
1861 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1862 self.repositories
1863 .values()
1864 .filter_map(|repo| {
1865 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1866 Some((repo.clone(), repo_path))
1867 })
1868 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1869 }
1870
1871 pub fn git_init(
1872 &self,
1873 path: Arc<Path>,
1874 fallback_branch_name: String,
1875 cx: &App,
1876 ) -> Task<Result<()>> {
1877 match &self.state {
1878 GitStoreState::Local { fs, .. } => {
1879 let fs = fs.clone();
1880 cx.background_executor()
1881 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1882 }
1883 GitStoreState::Remote {
1884 upstream_client,
1885 upstream_project_id: project_id,
1886 ..
1887 } => {
1888 let client = upstream_client.clone();
1889 let project_id = *project_id;
1890 cx.background_executor().spawn(async move {
1891 client
1892 .request(proto::GitInit {
1893 project_id: project_id,
1894 abs_path: path.to_string_lossy().into_owned(),
1895 fallback_branch_name,
1896 })
1897 .await?;
1898 Ok(())
1899 })
1900 }
1901 }
1902 }
1903
1904 pub fn git_clone(
1905 &self,
1906 repo: String,
1907 path: impl Into<Arc<std::path::Path>>,
1908 cx: &App,
1909 ) -> Task<Result<()>> {
1910 let path = path.into();
1911 match &self.state {
1912 GitStoreState::Local { fs, .. } => {
1913 let fs = fs.clone();
1914 cx.background_executor()
1915 .spawn(async move { fs.git_clone(&repo, &path).await })
1916 }
1917 GitStoreState::Remote {
1918 upstream_client,
1919 upstream_project_id,
1920 ..
1921 } => {
1922 if upstream_client.is_via_collab() {
1923 return Task::ready(Err(anyhow!(
1924 "Git Clone isn't supported for project guests"
1925 )));
1926 }
1927 let request = upstream_client.request(proto::GitClone {
1928 project_id: *upstream_project_id,
1929 abs_path: path.to_string_lossy().into_owned(),
1930 remote_repo: repo,
1931 });
1932
1933 cx.background_spawn(async move {
1934 let result = request.await?;
1935
1936 match result.success {
1937 true => Ok(()),
1938 false => Err(anyhow!("Git Clone failed")),
1939 }
1940 })
1941 }
1942 }
1943 }
1944
    /// Handles an `UpdateRepository` message from upstream, creating the
    /// remote repository entity on first sight, applying the update, and
    /// re-forwarding it to any further-downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // The subscription must be registered outside the `or_insert_with`
            // closure, which cannot borrow `this._subscriptions`.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Forward to downstream collaborators under our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
2000
    /// Handles a `RemoveRepository` message from upstream, dropping the local
    /// repository entity and re-forwarding the message downstream.
    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            // Forward to downstream collaborators under our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        });
        Ok(())
    }
2022
    /// Handles a remote request to initialize a git repository, delegating to
    /// [`Self::git_init`] and acknowledging on success.
    async fn handle_git_init(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitInit>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let name = envelope.payload.fallback_branch_name;
        cx.update(|cx| this.read(cx).git_init(path, name, cx))
            .await?;

        Ok(proto::Ack {})
    }
2035
2036 async fn handle_git_clone(
2037 this: Entity<Self>,
2038 envelope: TypedEnvelope<proto::GitClone>,
2039 cx: AsyncApp,
2040 ) -> Result<proto::GitCloneResponse> {
2041 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2042 let repo_name = envelope.payload.remote_repo;
2043 let result = cx
2044 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2045 .await;
2046
2047 Ok(proto::GitCloneResponse {
2048 success: result.is_ok(),
2049 })
2050 }
2051
    /// Handles a remote `Fetch` request: runs the fetch on the addressed
    /// repository, relaying askpass prompts back to the requester, and
    /// returns the command's stdout/stderr.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Credential prompts are proxied back over the wire to the requester.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2081
    /// Handles a remote `Push` request: pushes the named branch to the named
    /// remote on the addressed repository, relaying askpass prompts back to
    /// the requester, and returns the command's stdout/stderr.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        // Credential prompts are proxied back over the wire to the requester.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `options()` is only meaningful when the field is present, hence the
        // `as_ref().map(|_| ...)` gate.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2129
    /// Handles a remote `Pull` request: pulls from `remote_name` (optionally
    /// a specific branch, optionally rebasing) and returns git's output.
    /// Credential prompts are relayed to the requester via askpass.
    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `branch_name` is optional: absent means pull the current branch's
        // configured upstream.
        let branch_name = envelope.payload.branch_name.map(|name| name.into());
        let remote_name = envelope.payload.remote_name.into();
        let rebase = envelope.payload.rebase;

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }
2161
    /// Handles a remote `Stage` request: stages the given paths in the
    /// repository's index.
    async fn handle_stage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Parse every proto path into a RepoPath, failing fast on the first
        // invalid one so we never stage a partial set.
        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(|path| RepoPath::new(&path))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stage_entries(entries, cx)
            })
            .await?;
        Ok(proto::Ack {})
    }
2184
    /// Handles a remote `Unstage` request: removes the given paths from the
    /// repository's index (the mirror image of [`Self::handle_stage`]).
    async fn handle_unstage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Unstage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Fail fast on the first invalid path so no partial set is unstaged.
        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(|path| RepoPath::new(&path))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.unstage_entries(entries, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2208
    /// Handles a remote `Stash` request: stashes changes for the given paths.
    async fn handle_stash(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stash>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Fail fast on the first invalid path so no partial set is stashed.
        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(|path| RepoPath::new(&path))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_entries(entries, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2232
    /// Handles a remote `StashPop` request: applies and drops a stash entry.
    /// A `None` index presumably targets the most recent entry, matching
    /// `git stash pop`'s default — confirm against `Repository::stash_pop`.
    async fn handle_stash_pop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashPop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_pop(stash_index, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2250
    /// Handles a remote `StashApply` request: applies a stash entry without
    /// dropping it. A `None` index presumably targets the most recent entry —
    /// confirm against `Repository::stash_apply`.
    async fn handle_stash_apply(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashApply>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_apply(stash_index, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2268
    /// Handles a remote `StashDrop` request: deletes a stash entry without
    /// applying it. A `None` index presumably targets the most recent entry —
    /// confirm against `Repository::stash_drop`.
    async fn handle_stash_drop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashDrop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_drop(stash_index, cx)
            })
            .await??;

        Ok(proto::Ack {})
    }
2286
    /// Handles a remote `SetIndexText` request: overwrites the staged (index)
    /// content of a single file with the provided text.
    async fn handle_set_index_text(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SetIndexText>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let repo_path = RepoPath::from_proto(&envelope.payload.path)?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                // NOTE(review): the meaning of the third (`None`) argument is
                // not visible here — confirm against
                // `spawn_set_index_text_job`'s signature.
                repository_handle.spawn_set_index_text_job(
                    repo_path,
                    envelope.payload.text,
                    None,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2308
    /// Handles a remote `RunGitHook` request: runs the named git hook in the
    /// target repository. Rejects payloads that don't decode to a known hook.
    async fn handle_run_hook(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RunGitHook>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.run_hook(hook, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2324
    /// Handles a remote `Commit` request: creates a commit with the given
    /// message and options, relaying credential prompts (e.g. for commit
    /// signing) back to the requester via askpass.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    // `zip` yields an author override only when BOTH name and
                    // email were supplied; otherwise the repo default is used.
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2363
2364 async fn handle_get_remotes(
2365 this: Entity<Self>,
2366 envelope: TypedEnvelope<proto::GetRemotes>,
2367 mut cx: AsyncApp,
2368 ) -> Result<proto::GetRemotesResponse> {
2369 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2370 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2371
2372 let branch_name = envelope.payload.branch_name;
2373 let is_push = envelope.payload.is_push;
2374
2375 let remotes = repository_handle
2376 .update(&mut cx, |repository_handle, _| {
2377 repository_handle.get_remotes(branch_name, is_push)
2378 })
2379 .await??;
2380
2381 Ok(proto::GetRemotesResponse {
2382 remotes: remotes
2383 .into_iter()
2384 .map(|remotes| proto::get_remotes_response::Remote {
2385 name: remotes.name.to_string(),
2386 })
2387 .collect::<Vec<_>>(),
2388 })
2389 }
2390
2391 async fn handle_get_worktrees(
2392 this: Entity<Self>,
2393 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2394 mut cx: AsyncApp,
2395 ) -> Result<proto::GitWorktreesResponse> {
2396 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2397 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2398
2399 let worktrees = repository_handle
2400 .update(&mut cx, |repository_handle, _| {
2401 repository_handle.worktrees()
2402 })
2403 .await??;
2404
2405 Ok(proto::GitWorktreesResponse {
2406 worktrees: worktrees
2407 .into_iter()
2408 .map(|worktree| worktree_to_proto(&worktree))
2409 .collect::<Vec<_>>(),
2410 })
2411 }
2412
    /// Handles a remote `GitCreateWorktree` request: creates a new git
    /// worktree named `name` at `directory`, checked out at `commit`.
    async fn handle_create_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let directory = PathBuf::from(envelope.payload.directory);
        let name = envelope.payload.name;
        let commit = envelope.payload.commit;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_worktree(name, directory, commit)
            })
            .await??;

        Ok(proto::Ack {})
    }
2432
    /// Handles a remote `GitRemoveWorktree` request: removes the worktree at
    /// `path`; `force` is forwarded to control removal of dirty worktrees.
    async fn handle_remove_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRemoveWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let path = PathBuf::from(envelope.payload.path);
        let force = envelope.payload.force;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.remove_worktree(path, force)
            })
            .await??;

        Ok(proto::Ack {})
    }
2451
    /// Handles a remote `GitRenameWorktree` request: moves the worktree from
    /// `old_path` to `new_path`.
    async fn handle_rename_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRenameWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let old_path = PathBuf::from(envelope.payload.old_path);
        let new_path = PathBuf::from(envelope.payload.new_path);

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.rename_worktree(old_path, new_path)
            })
            .await??;

        Ok(proto::Ack {})
    }
2470
    /// Handles a remote `GitGetBranches` request: lists the repository's
    /// branches, serialized for transport.
    async fn handle_get_branches(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitGetBranches>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitBranchesResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branches = repository_handle
            .update(&mut cx, |repository_handle, _| repository_handle.branches())
            .await??;

        Ok(proto::GitBranchesResponse {
            branches: branches
                .into_iter()
                .map(|branch| branch_to_proto(&branch))
                .collect::<Vec<_>>(),
        })
    }
    /// Handles a remote `GetDefaultBranch` request: reports the repository's
    /// default branch, or `None` if one cannot be determined.
    async fn handle_get_default_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDefaultBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetDefaultBranchResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch = repository_handle
            .update(&mut cx, |repository_handle, _| {
                // NOTE(review): the meaning of the `false` flag is not visible
                // here — confirm against `Repository::default_branch`.
                repository_handle.default_branch(false)
            })
            .await??
            .map(Into::into);

        Ok(proto::GetDefaultBranchResponse { branch })
    }
2507 async fn handle_create_branch(
2508 this: Entity<Self>,
2509 envelope: TypedEnvelope<proto::GitCreateBranch>,
2510 mut cx: AsyncApp,
2511 ) -> Result<proto::Ack> {
2512 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2513 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2514 let branch_name = envelope.payload.branch_name;
2515
2516 repository_handle
2517 .update(&mut cx, |repository_handle, _| {
2518 repository_handle.create_branch(branch_name, None)
2519 })
2520 .await??;
2521
2522 Ok(proto::Ack {})
2523 }
2524
2525 async fn handle_change_branch(
2526 this: Entity<Self>,
2527 envelope: TypedEnvelope<proto::GitChangeBranch>,
2528 mut cx: AsyncApp,
2529 ) -> Result<proto::Ack> {
2530 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2531 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2532 let branch_name = envelope.payload.branch_name;
2533
2534 repository_handle
2535 .update(&mut cx, |repository_handle, _| {
2536 repository_handle.change_branch(branch_name)
2537 })
2538 .await??;
2539
2540 Ok(proto::Ack {})
2541 }
2542
    /// Handles a remote `GitRenameBranch` request: renames `branch` to
    /// `new_name` in the target repository.
    async fn handle_rename_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRenameBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch = envelope.payload.branch;
        let new_name = envelope.payload.new_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.rename_branch(branch, new_name)
            })
            .await??;

        Ok(proto::Ack {})
    }
2561
    /// Handles a remote `GitCreateRemote` request: registers a new remote
    /// named `remote_name` pointing at `remote_url`.
    async fn handle_create_remote(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateRemote>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let remote_name = envelope.payload.remote_name;
        let remote_url = envelope.payload.remote_url;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_remote(remote_name, remote_url)
            })
            .await??;

        Ok(proto::Ack {})
    }
2580
    /// Handles a remote `GitDeleteBranch` request: deletes `branch_name`,
    /// either as a local or a remote-tracking branch depending on `is_remote`.
    async fn handle_delete_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitDeleteBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let is_remote = envelope.payload.is_remote;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.delete_branch(is_remote, branch_name)
            })
            .await??;

        Ok(proto::Ack {})
    }
2599
2600 async fn handle_remove_remote(
2601 this: Entity<Self>,
2602 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2603 mut cx: AsyncApp,
2604 ) -> Result<proto::Ack> {
2605 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2606 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2607 let remote_name = envelope.payload.remote_name;
2608
2609 repository_handle
2610 .update(&mut cx, |repository_handle, _| {
2611 repository_handle.remove_remote(remote_name)
2612 })
2613 .await??;
2614
2615 Ok(proto::Ack {})
2616 }
2617
    /// Handles a remote `GitShow` request: looks up the details of a single
    /// commit and serializes them for transport.
    async fn handle_show(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitShow>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitCommitDetails> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.show(envelope.payload.commit)
            })
            .await??;
        Ok(proto::GitCommitDetails {
            sha: commit.sha.into(),
            message: commit.message.into(),
            commit_timestamp: commit.commit_timestamp,
            author_email: commit.author_email.into(),
            author_name: commit.author_name.into(),
        })
    }
2639
    /// Handles a remote `LoadCommitDiff` request: loads the per-file diff of
    /// a commit (old/new text plus a binary flag for each changed file).
    async fn handle_load_commit_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::LoadCommitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::LoadCommitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit_diff = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.load_commit_diff(envelope.payload.commit)
            })
            .await??;
        Ok(proto::LoadCommitDiffResponse {
            files: commit_diff
                .files
                .into_iter()
                .map(|file| proto::CommitFile {
                    path: file.path.to_proto(),
                    old_text: file.old_text,
                    new_text: file.new_text,
                    is_binary: file.is_binary,
                })
                .collect(),
        })
    }
2666
    /// Handles a remote `GitFileHistory` request: returns a paginated slice
    /// of a file's commit history (`skip` entries offset, optional `limit`).
    async fn handle_file_history(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitFileHistory>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitFileHistoryResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let path = RepoPath::from_proto(&envelope.payload.path)?;
        let skip = envelope.payload.skip as usize;
        // `limit` of `None` means no upper bound on returned entries.
        let limit = envelope.payload.limit.map(|l| l as usize);

        let file_history = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.file_history_paginated(path, skip, limit)
            })
            .await??;

        Ok(proto::GitFileHistoryResponse {
            entries: file_history
                .entries
                .into_iter()
                .map(|entry| proto::FileHistoryEntry {
                    sha: entry.sha.to_string(),
                    subject: entry.subject.to_string(),
                    message: entry.message.to_string(),
                    commit_timestamp: entry.commit_timestamp,
                    author_name: entry.author_name.to_string(),
                    author_email: entry.author_email.to_string(),
                })
                .collect(),
            path: file_history.path.to_proto(),
        })
    }
2700
    /// Handles a remote `GitReset` request: resets the repository to the
    /// given commit. Only soft and mixed resets are exposed over RPC — the
    /// proto enum carries no hard-reset variant.
    async fn handle_reset(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitReset>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let mode = match envelope.payload.mode() {
            git_reset::ResetMode::Soft => ResetMode::Soft,
            git_reset::ResetMode::Mixed => ResetMode::Mixed,
        };

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.reset(envelope.payload.commit, mode, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2721
    /// Handles a remote `GitCheckoutFiles` request: restores the given paths
    /// to their state at `commit`.
    async fn handle_checkout_files(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCheckoutFiles>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        // Fail fast on the first invalid path so no partial set is restored.
        let paths = envelope
            .payload
            .paths
            .iter()
            .map(|s| RepoPath::from_proto(s))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
            })
            .await?;
        Ok(proto::Ack {})
    }
2743
    /// Handles a remote `OpenCommitMessageBuffer` request: opens (or reuses)
    /// the repository's commit-message buffer, replicates it to the
    /// requesting peer, and returns the buffer's id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                // Share the buffer with the original requester (falling back
                // to the direct sender when the message wasn't forwarded).
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2774
    /// Handles a remote `AskPassRequest`: forwards a credential prompt to the
    /// local askpass delegate registered under `askpass_id` and returns the
    /// user's answer.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map while prompting so a concurrent
        // request for the same id can't use it; it is reinserted below.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so follow-up prompts for this id still work.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2803
    /// Handles a remote `CheckForPushedCommits` request: reports which
    /// branches the current commits have already been pushed to.
    async fn handle_check_for_pushed_commits(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::CheckForPushedCommits>,
        mut cx: AsyncApp,
    ) -> Result<proto::CheckForPushedCommitsResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branches = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.check_for_pushed_commits()
            })
            .await??;
        Ok(proto::CheckForPushedCommitsResponse {
            pushed_to: branches
                .into_iter()
                .map(|commit| commit.to_string())
                .collect(),
        })
    }
2824
2825 async fn handle_git_diff(
2826 this: Entity<Self>,
2827 envelope: TypedEnvelope<proto::GitDiff>,
2828 mut cx: AsyncApp,
2829 ) -> Result<proto::GitDiffResponse> {
2830 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2831 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2832 let diff_type = match envelope.payload.diff_type() {
2833 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2834 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2835 proto::git_diff::DiffType::MergeBase => {
2836 let base_ref = envelope
2837 .payload
2838 .merge_base_ref
2839 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2840 DiffType::MergeBase {
2841 base_ref: base_ref.into(),
2842 }
2843 }
2844 };
2845
2846 let mut diff = repository_handle
2847 .update(&mut cx, |repository_handle, cx| {
2848 repository_handle.diff(diff_type, cx)
2849 })
2850 .await??;
2851 const ONE_MB: usize = 1_000_000;
2852 if diff.len() > ONE_MB {
2853 diff = diff.chars().take(ONE_MB).collect()
2854 }
2855
2856 Ok(proto::GitDiffResponse { diff })
2857 }
2858
    /// Handles a remote `GetTreeDiff` request: computes a tree-level diff
    /// between `base` and `head` (either directly, or relative to their merge
    /// base when `is_merge` is set) and serializes each entry's path, status,
    /// and — for modified/deleted files — the old blob oid.
    async fn handle_tree_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetTreeDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId(request.payload.repository_id);
        let diff_type = if request.payload.is_merge {
            DiffTreeType::MergeBase {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        } else {
            DiffTreeType::Since {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        };

        let diff = this
            .update(&mut cx, |this, cx| {
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
            })
            .context("missing repository")?
            .await??;

        Ok(proto::GetTreeDiffResponse {
            entries: diff
                .entries
                .into_iter()
                .map(|(path, status)| proto::TreeDiffStatus {
                    path: path.as_ref().to_proto(),
                    // First match reads the variant tag only (no move)...
                    status: match status {
                        TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
                        TreeDiffStatus::Modified { .. } => {
                            proto::tree_diff_status::Status::Modified.into()
                        }
                        TreeDiffStatus::Deleted { .. } => {
                            proto::tree_diff_status::Status::Deleted.into()
                        }
                    },
                    // ...then this one extracts the old oid where one exists.
                    oid: match status {
                        TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
                            Some(old.to_string())
                        }
                        TreeDiffStatus::Added => None,
                    },
                })
                .collect(),
        })
    }
2910
    /// Handles a remote `GetBlobContent` request: loads the content of the
    /// blob identified by `oid` from the repository's object database.
    async fn handle_get_blob_content(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetBlobContent>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetBlobContentResponse> {
        let oid = git::Oid::from_str(&request.payload.oid)?;
        let repository_id = RepositoryId(request.payload.repository_id);
        let content = this
            .update(&mut cx, |this, cx| {
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
            })
            .context("missing repository")?
            .await?;
        Ok(proto::GetBlobContentResponse { content })
    }
2927
    /// Handles a remote `OpenUnstagedDiff` request: opens the buffer's
    /// unstaged diff, records it in `shared_diffs` for the requesting peer,
    /// and returns the staged (index) base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Remember the shared diff per-peer so later updates can be relayed.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2951
    /// Handles a remote `OpenUncommittedDiff` request: opens the buffer's
    /// uncommitted diff, records it in `shared_diffs` for the requesting
    /// peer, and returns the committed/staged base texts plus a `Mode` flag
    /// telling the client how to interpret them.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Remember the shared diff per-peer so later updates can be relayed.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary (unstaged) diff's base text is the index content,
            // when the file exists in the index at all.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Same underlying snapshot: avoid sending the staged
                        // text twice; the client reuses the committed text.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // File exists in HEAD but not in the index.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // File does not exist in HEAD; only index content (if any).
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3012
    /// Handles a remote `UpdateDiffBases` notification: forwards new base
    /// texts to the buffer's diff state. Silently ignored when the buffer or
    /// its diff state is no longer tracked.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
3031
    /// Handles a remote `BlameBuffer` request: waits until the local buffer
    /// has caught up to the requester's version, then computes git blame for
    /// it and returns the serialized result.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Block until the buffer replica includes the requested edits, so the
        // blame corresponds to what the requester is actually looking at.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3054
    /// Handles a remote `GetPermalinkToLine` request: builds a hosting
    /// provider permalink for the requested line range of a buffer.
    async fn handle_get_permalink_to_line(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPermalinkToLine>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetPermalinkToLineResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        // NOTE(review): unlike handle_blame_buffer, the payload's version is
        // not deserialized or waited on — the permalink is computed against
        // the buffer's current state. Confirm this is intentional.
        // let version = deserialize_version(&envelope.payload.version);
        let selection = {
            let proto_selection = envelope
                .payload
                .selection
                .context("no selection to get permalink for defined")?;
            proto_selection.start as u32..proto_selection.end as u32
        };
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        let permalink = this
            .update(&mut cx, |this, cx| {
                this.get_permalink_to_line(&buffer, selection, cx)
            })
            .await?;
        Ok(proto::GetPermalinkToLineResponse {
            permalink: permalink.to_string(),
        })
    }
3081
3082 fn repository_for_request(
3083 this: &Entity<Self>,
3084 id: RepositoryId,
3085 cx: &mut AsyncApp,
3086 ) -> Result<Entity<Repository>> {
3087 this.read_with(cx, |this, _| {
3088 this.repositories
3089 .get(&id)
3090 .context("missing repository handle")
3091 .cloned()
3092 })
3093 }
3094
3095 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3096 self.repositories
3097 .iter()
3098 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3099 .collect()
3100 }
3101
    /// Maps a batch of updated worktree entries to the repositories that
    /// contain them, returning repo-relative paths grouped by repository.
    ///
    /// When repositories are nested, each path is assigned to its innermost
    /// containing repository only.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Pair each repository with its absolute work directory up front so
        // all of the matching below can run on the background executor.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize once; work directories are absolute paths too.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // One task per repository; reverse-sorted order means deeper
            // (more specific) work directories are processed first below.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3181}
3182
impl BufferGitState {
    /// Creates empty per-buffer git state: no diffs, no base texts, and no
    /// conflict tracking attached yet.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }

    /// Records the buffer's new language and kicks off a diff recalculation
    /// so diff snapshots pick up the new grammar.
    #[ztracing::instrument(skip_all)]
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Re-parses conflict markers in `buffer` and updates the associated
    /// conflict set, if any.
    ///
    /// The returned receiver resolves once the conflict set has been updated.
    /// If there is no live conflict set, or it currently reports no conflict,
    /// the sender is dropped immediately and the receiver yields `Canceled`.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff on the background executor; only the final
                // snapshot swap runs on the foreground.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake up everyone waiting on a conflict re-parse, not
                    // just the caller that spawned this task.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }

    /// The index-vs-working-copy diff, if anything still holds it alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// The HEAD-vs-working-copy diff, if anything still holds it alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// The diff keyed by `oid`, if anything still holds it alive.
    fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
        self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
    }

    /// Applies new index/HEAD base texts received over the wire and triggers
    /// a diff recalculation. Messages with an unknown mode (e.g. from a newer
    /// peer) are silently ignored.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }

    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves when it completes (or the watch channel closes); otherwise
    /// returns `None`.
    pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
        if *self.recalculating_tx.borrow() {
            let mut rx = self.recalculating_tx.subscribe();
            Some(async move {
                loop {
                    let is_recalculating = rx.recv().await;
                    // Stop once the flag flips to false or the sender is gone.
                    if is_recalculating != Some(true) {
                        break;
                    }
                }
            })
        } else {
            None
        }
    }

    /// Stores new base texts (normalizing line endings to LF), marks which
    /// bases changed, and recomputes the affected diffs.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: Option<DiffBasesChange>,
        cx: &mut Context<Self>,
    ) {
        match diff_bases_change {
            Some(DiffBasesChange::SetIndex(index)) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetHead(head)) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            Some(DiffBasesChange::SetBoth(text)) => {
                // Index and HEAD share one Arc here; recalculate_diffs relies
                // on that pointer equality to skip redundant diffing.
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::from(text.as_str())
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetEach { index, head }) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            None => {}
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged, uncommitted, and per-oid diffs for `buffer`
    /// on a spawned task, publishing the new snapshots when done.
    ///
    /// The task aborts early if new hunk staging operations were issued since
    /// the state captured here was written; a later recalculation will pick
    /// up the settled index state instead.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Cheap pointer comparison: SetBoth stores the same Arc in both slots.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop cached base texts for oid diffs nobody holds anymore.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When index == HEAD the unstaged diff can be reused verbatim.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Refresh each still-alive oid-keyed diff against its cached base.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags; ignore failure if the entity is gone.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
}
3562
/// Builds an [`AskPassDelegate`] that forwards credential prompts from a git
/// operation running here to the downstream client, and relays the encrypted
/// response back to the waiting operation.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // No downstream client connected: drop the prompt; the waiting
            // side observes the channel closing.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Best-effort scrub of the plaintext from memory.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3592
3593impl RepositoryId {
3594 pub fn to_proto(self) -> u64 {
3595 self.0
3596 }
3597
3598 pub fn from_proto(id: u64) -> Self {
3599 RepositoryId(id)
3600 }
3601}
3602
impl RepositorySnapshot {
    /// Creates a snapshot for a repository that has not been scanned yet.
    ///
    /// When `original_repo_abs_path` is absent it falls back to the work
    /// directory itself, which makes the repository its own "main" worktree
    /// (see [`Self::is_main_worktree`]).
    fn empty(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
    ) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            original_repo_abs_path: original_repo_abs_path
                .unwrap_or_else(|| work_directory_abs_path.clone()),
            work_directory_abs_path,
            branch: None,
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
            linked_worktrees: Arc::from([]),
            path_style,
        }
    }

    /// Builds the first `UpdateRepository` message for a downstream client:
    /// every status entry is sent as an update, nothing as removed, and
    /// `entry_ids` carries the repository's own id.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// Builds an incremental `UpdateRepository` message by merge-joining the
    /// sorted status entries of `self` and `old`, emitting only entries that
    /// were added, changed, or removed since `old`.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        // Classic sorted merge-join: advance whichever side has the smaller
        // path; equal paths are compared for status/diff-stat changes.
        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Present only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Present only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    pub fn is_main_worktree(&self) -> bool {
        self.work_directory_abs_path == self.original_repo_abs_path
    }

    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// This is by definition the opposite of [`Self::is_main_worktree`].
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }

    /// The worktrees linked to this repository, as last scanned.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }

    /// Iterates over all status entries, sorted by repo path.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// The aggregate git status over all paths in the repository.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// The status entry for a single repo-relative path, if tracked.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// The diff stat (added/removed counts) for a single repo-relative path.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }

    /// Converts an absolute path into a repo-relative path, or `None` if the
    /// path is not inside this repository's work directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Converts a repo-relative path back into an absolute path.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }

    // Static form of `abs_path_to_repo_path`, shared with code that only has
    // the work directory path (see GitStore::process_updated_entries).
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }

    /// Whether `repo_path` was conflicted the last time the merge heads
    /// changed, regardless of its current status.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }

    /// Whether `repo_path` is conflicted now or was conflicted when the merge
    /// heads last changed.
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change = self
            .merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
3840
3841pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3842 proto::StashEntry {
3843 oid: entry.oid.as_bytes().to_vec(),
3844 message: entry.message.clone(),
3845 branch: entry.branch.clone(),
3846 index: entry.index as u64,
3847 timestamp: entry.timestamp,
3848 }
3849}
3850
3851pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3852 Ok(StashEntry {
3853 oid: Oid::from_bytes(&entry.oid)?,
3854 message: entry.message.clone(),
3855 index: entry.index as usize,
3856 branch: entry.branch.clone(),
3857 timestamp: entry.timestamp,
3858 })
3859}
3860
3861impl MergeDetails {
3862 async fn update(
3863 &mut self,
3864 backend: &Arc<dyn GitRepository>,
3865 current_conflicted_paths: Vec<RepoPath>,
3866 ) -> Result<bool> {
3867 log::debug!("load merge details");
3868 self.message = backend.merge_message().await.map(SharedString::from);
3869 let heads = backend
3870 .revparse_batch(vec![
3871 "MERGE_HEAD".into(),
3872 "CHERRY_PICK_HEAD".into(),
3873 "REBASE_HEAD".into(),
3874 "REVERT_HEAD".into(),
3875 "APPLY_HEAD".into(),
3876 ])
3877 .await
3878 .log_err()
3879 .unwrap_or_default()
3880 .into_iter()
3881 .map(|opt| opt.map(SharedString::from))
3882 .collect::<Vec<_>>();
3883
3884 let mut conflicts_changed = false;
3885
3886 // Record the merge state for newly conflicted paths
3887 for path in ¤t_conflicted_paths {
3888 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3889 conflicts_changed = true;
3890 self.merge_heads_by_conflicted_path
3891 .insert(path.clone(), heads.clone());
3892 }
3893 }
3894
3895 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3896 self.merge_heads_by_conflicted_path
3897 .retain(|path, old_merge_heads| {
3898 let keep = current_conflicted_paths.contains(path)
3899 || (old_merge_heads == &heads
3900 && old_merge_heads.iter().any(|head| head.is_some()));
3901 if !keep {
3902 conflicts_changed = true;
3903 }
3904 keep
3905 });
3906
3907 Ok(conflicts_changed)
3908 }
3909}
3910
3911impl Repository {
3912 pub fn is_trusted(&self) -> bool {
3913 match self.repository_state.peek() {
3914 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3915 _ => false,
3916 }
3917 }
3918
3919 pub fn snapshot(&self) -> RepositorySnapshot {
3920 self.snapshot.clone()
3921 }
3922
3923 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3924 self.pending_ops.iter().cloned()
3925 }
3926
3927 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3928 self.pending_ops.summary().clone()
3929 }
3930
3931 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3932 self.pending_ops
3933 .get(&PathKey(path.as_ref().clone()), ())
3934 .cloned()
3935 }
3936
    /// Constructs a repository backed by a local git checkout.
    ///
    /// The git backend is opened asynchronously: `repository_state` is a
    /// shared future that the local git worker (and any queued jobs) await
    /// before touching the repository. Open failures are stringified so the
    /// shared future stays cloneable.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Stringify so the result is Clone-able through `.shared()`.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // A branch change invalidates any cached commit-graph data once the
        // initial scan has completed.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4004
    /// Constructs a repository proxied over the collaboration protocol.
    ///
    /// All git jobs are forwarded through `client` to the remote host; the
    /// repository state is immediately ready (no backend to open locally).
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4042
4043 pub fn git_store(&self) -> Option<Entity<GitStore>> {
4044 self.git_store.upgrade()
4045 }
4046
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository and pushes the resulting changes into each buffer's
    /// diff state (and to any downstream client).
    ///
    /// Runs as a keyed job (`GitJobKey::ReloadBufferDiffBases`), presumably so
    /// that concurrent reload requests coalesce — TODO confirm key semantics.
    /// Only meaningful for local repositories; logs an error otherwise.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (foreground): collect, for each open buffer in this
                // repo, its current base texts — but only for the diff kinds
                // something is actually still holding alive.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Phase 2 (background): load fresh index/HEAD texts from git
                // and diff them against the captured ones, producing the
                // minimal DiffBasesChange per buffer (or None if unchanged).
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (foreground): apply each change to the buffer's
                // diff state and mirror it to the downstream client, if any.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4206
    /// Enqueues `job` on this repository's git worker queue and returns a
    /// receiver for its result.
    ///
    /// Convenience wrapper around [`Self::send_keyed_job`] with no dedup key,
    /// so the job is always run and never coalesced with other jobs.
    /// `status`, when present, is shown as the active-job message while the
    /// job runs.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4219
    /// Like [`Self::send_job`], but additionally returns a [`CancellationToken`]
    /// that can be used to cancel the job via the worker's message channel.
    ///
    /// While the job runs, `status` (if provided) is recorded in `active_jobs`
    /// so the UI can display a progress message; the entry is removed when the
    /// job finishes, successfully or not. Dropping the returned receiver does
    /// not cancel the job.
    pub fn send_cancellable_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> (oneshot::Receiver<R>, CancellationToken)
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // A unique id ties together the queued job, its status entry, and the
        // cancellation token.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        let token = CancellationToken {
            id: job_id,
            job_sender: self.job_sender.clone(),
        };
        self.job_sender
            .unbounded_send(GitWorkerMessage::Job(GitJob {
                id: job_id,
                key: None,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Publish the status message for the duration of the job.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );
                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;
                        // Always clear the status entry; ignore failure if the
                        // repository entity has been dropped.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();
                        // The caller may have dropped the receiver; that's fine.
                        result_tx.send(result).ok();
                    })
                }),
            }))
            .ok();
        (result_rx, token)
    }
4270
    /// Enqueues `job` on the repository's git worker and returns a receiver
    /// for its result.
    ///
    /// When `key` is `Some`, the worker can use it to identify jobs that
    /// target the same logical resource (e.g. index writes for the same set
    /// of paths). While the job runs, `status` (if provided) is recorded in
    /// `active_jobs` so the UI can display a progress message; the entry is
    /// removed when the job completes. Dropping the receiver does not cancel
    /// the job.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitWorkerMessage::Job(GitJob {
                id: job_id,
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Publish the status message for the duration of the job.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Always clear the status entry; ignore failure if the
                        // repository entity has been dropped.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The caller may have dropped the receiver; that's fine.
                        result_tx.send(result).ok();
                    })
                }),
            }))
            .ok();
        result_rx
    }
4321
4322 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4323 let Some(git_store) = self.git_store.upgrade() else {
4324 return;
4325 };
4326 let entity = cx.entity();
4327 git_store.update(cx, |git_store, cx| {
4328 let Some((&id, _)) = git_store
4329 .repositories
4330 .iter()
4331 .find(|(_, handle)| *handle == &entity)
4332 else {
4333 return;
4334 };
4335 git_store.active_repo_id = Some(id);
4336 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4337 });
4338 }
4339
    /// Returns the git status entries recorded in the current snapshot,
    /// without querying the underlying repository.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4343
    /// Returns the cached diff statistics for `path` from the current
    /// snapshot, if any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4347
    /// Returns a clone of the stash entries recorded in the current snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4351
4352 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4353 let git_store = self.git_store.upgrade()?;
4354 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4355 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4356 let abs_path = SanitizedPath::new(&abs_path);
4357 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4358 Some(ProjectPath {
4359 worktree_id: worktree.read(cx).id(),
4360 path: relative_path,
4361 })
4362 }
4363
    /// Maps a `ProjectPath` to a repository-relative path.
    ///
    /// Returns `None` if the git store has been dropped, the path cannot be
    /// absolutized, or the absolute path lies outside this repository.
    pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
        let git_store = self.git_store.upgrade()?;
        let worktree_store = git_store.read(cx).worktree_store.read(cx);
        let abs_path = worktree_store.absolutize(path, cx)?;
        self.snapshot.abs_path_to_repo_path(&abs_path)
    }
4370
4371 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4372 other
4373 .read(cx)
4374 .snapshot
4375 .work_directory_abs_path
4376 .starts_with(&self.snapshot.work_directory_abs_path)
4377 }
4378
4379 pub fn open_commit_buffer(
4380 &mut self,
4381 languages: Option<Arc<LanguageRegistry>>,
4382 buffer_store: Entity<BufferStore>,
4383 cx: &mut Context<Self>,
4384 ) -> Task<Result<Entity<Buffer>>> {
4385 let id = self.id;
4386 if let Some(buffer) = self.commit_message_buffer.clone() {
4387 return Task::ready(Ok(buffer));
4388 }
4389 let this = cx.weak_entity();
4390
4391 let rx = self.send_job(None, move |state, mut cx| async move {
4392 let Some(this) = this.upgrade() else {
4393 bail!("git store was dropped");
4394 };
4395 match state {
4396 RepositoryState::Local(..) => {
4397 this.update(&mut cx, |_, cx| {
4398 Self::open_local_commit_buffer(languages, buffer_store, cx)
4399 })
4400 .await
4401 }
4402 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4403 let request = client.request(proto::OpenCommitMessageBuffer {
4404 project_id: project_id.0,
4405 repository_id: id.to_proto(),
4406 });
4407 let response = request.await.context("requesting to open commit buffer")?;
4408 let buffer_id = BufferId::new(response.buffer_id)?;
4409 let buffer = buffer_store
4410 .update(&mut cx, |buffer_store, cx| {
4411 buffer_store.wait_for_remote_buffer(buffer_id, cx)
4412 })
4413 .await?;
4414 if let Some(language_registry) = languages {
4415 let git_commit_language =
4416 language_registry.language_for_name("Git Commit").await?;
4417 buffer.update(&mut cx, |buffer, cx| {
4418 buffer.set_language(Some(git_commit_language), cx);
4419 });
4420 }
4421 this.update(&mut cx, |this, _| {
4422 this.commit_message_buffer = Some(buffer.clone());
4423 });
4424 Ok(buffer)
4425 }
4426 }
4427 });
4428
4429 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
4430 }
4431
4432 fn open_local_commit_buffer(
4433 language_registry: Option<Arc<LanguageRegistry>>,
4434 buffer_store: Entity<BufferStore>,
4435 cx: &mut Context<Self>,
4436 ) -> Task<Result<Entity<Buffer>>> {
4437 cx.spawn(async move |repository, cx| {
4438 let git_commit_language = match language_registry {
4439 Some(language_registry) => {
4440 Some(language_registry.language_for_name("Git Commit").await?)
4441 }
4442 None => None,
4443 };
4444 let buffer = buffer_store
4445 .update(cx, |buffer_store, cx| {
4446 buffer_store.create_buffer(git_commit_language, false, cx)
4447 })
4448 .await?;
4449
4450 repository.update(cx, |repository, _| {
4451 repository.commit_message_buffer = Some(buffer.clone());
4452 })?;
4453 Ok(buffer)
4454 })
4455 }
4456
    /// Restores `paths` to their contents at `commit` (the equivalent of
    /// `git checkout <commit> -- <paths>`), tracking the operation as a
    /// pending revert so the UI can reflect the in-flight state.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                // Local: run the checkout directly via the backend.
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote: forward the request to the host over RPC.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4511
    /// Resets HEAD to `commit` with the given mode (`Soft` keeps the index,
    /// `Mixed` resets it), returning a receiver for the result.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            // Translate the local enum into the proto encoding.
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4545
    /// Loads the details (sha, message, author, timestamp) of `commit`,
    /// locally or via RPC depending on the repository state.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4573
    /// Loads the full diff introduced by `commit` (per-file old/new text),
    /// locally or via RPC depending on the repository state.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    // Decode each proto file entry; fail on the first invalid path.
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4609
    /// Loads the full commit history of `path` (no pagination): shorthand for
    /// [`Self::file_history_paginated`] with `skip = 0` and no limit.
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4616
    /// Loads the commit history of `path`, skipping the first `skip` entries
    /// and returning at most `limit` entries (all remaining when `None`).
    ///
    /// The result also carries back the path reported by the backend, which
    /// is decoded from the proto response in the remote case.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4658
    /// Returns the already-fetched graph data for the given source/order pair,
    /// if [`Self::graph_data`] has been called for it before. Does not trigger
    /// a fetch.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4666
4667 pub fn search_commits(
4668 &mut self,
4669 log_source: LogSource,
4670 search_args: SearchCommitArgs,
4671 request_tx: smol::channel::Sender<Oid>,
4672 cx: &mut Context<Self>,
4673 ) {
4674 let repository_state = self.repository_state.clone();
4675
4676 cx.background_spawn(async move {
4677 let repo_state = repository_state.await;
4678
4679 match repo_state {
4680 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4681 backend
4682 .search_commits(log_source, search_args, request_tx)
4683 .await
4684 .log_err();
4685 }
4686 Ok(RepositoryState::Remote(_)) => {}
4687 Err(_) => {}
4688 };
4689 })
4690 .detach();
4691 }
4692
    /// Returns the commits in `range` of the git graph for the given
    /// source/order pair, starting a background fetch on first access.
    ///
    /// The fetch task lives inside the `initial_graph_data` entry it fills;
    /// removing the entry cancels the fetch. The returned response clamps
    /// `range` to the data loaded so far and reports whether loading is still
    /// in progress.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record the failure on the entry so the UI can surface it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    // The entry owns this task, so a missing
                                    // entry implies the task should be gone too.
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to what has been loaded so far. Note that
        // an out-of-range start is pulled back to the last element rather than
        // yielding an empty slice.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4762
    /// Streams the initial git-graph commit data for a local repository into
    /// the corresponding `initial_graph_data` entry.
    ///
    /// The backend produces batches of commit data over a channel on a
    /// background task; each batch is appended to the entry (with an
    /// oid-to-index map kept in sync) and a `CountUpdated` event is emitted.
    /// Returns the backend task's final result once the channel closes.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // Produce the data on the background executor; errors are stringified
        // so they can be stored on the entry as a SharedString.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Consume batches until the producer closes the channel.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                // The entry owns the task driving this function, so it should
                // always exist while we're running.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4817
    /// Returns the load state of the detailed commit data for `sha`, kicking
    /// off an asynchronous fetch on a cache miss. Never blocks.
    ///
    /// On a miss the sha is forwarded to the commit-data handler; if the
    /// handler is closed it is (re)opened instead, and the request is expected
    /// to be retried on a later call while the result reads as `Loading`.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark as loading if the request was actually queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                // A handler is already starting; nothing to do yet.
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4838
    /// Spins up the commit-data handler: a background reader that serves
    /// commit-detail requests, plus a foreground task that copies results
    /// into `commit_data`.
    ///
    /// Requests flow in over `request_tx` and results come back over
    /// `result_tx`. The background loop shuts itself down after 10 seconds of
    /// inactivity (or when either channel closes), at which point the
    /// foreground task marks the handler `Closed` so a later
    /// [`Self::fetch_commit_data`] call can reopen it.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        // Foreground: store each result on the entity and notify observers.
        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                // Entity dropped; stop consuming results.
                if result.is_err() {
                    break;
                }
            }

            // Channel closed: mark the handler closed so it can be reopened.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        // Background: read commit data on demand, with an idle timeout.
        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Restarted on every iteration, so this is a 10s *idle* timeout.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Keep serving other requests after a bad sha.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task finish and
            // flip the handler state to Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4929
    /// Returns the project's `BufferStore`, or `None` if the git store has
    /// been dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4933
    /// Saves any open buffers corresponding to `entries` that are backed by an
    /// existing file and have unsaved edits, returning one save task per
    /// buffer.
    ///
    /// Used before index-mutating operations so that what gets staged matches
    /// the editor contents.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    // Skip deleted files and clean buffers.
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
4960
    /// Stages the given paths (`git add`); see
    /// [`Self::stage_or_unstage_entries`] for the details.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4968
    /// Unstages the given paths (`git reset`); see
    /// [`Self::stage_or_unstage_entries`] for the details.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4976
    /// Stages (`stage == true`) or unstages the given paths.
    ///
    /// The flow is: save any dirty buffers for the affected paths, then run a
    /// keyed worker job that (1) optimistically flips all hunks in each open
    /// buffer's uncommitted diff so the UI updates immediately, (2) performs
    /// the actual index write (locally or over RPC), and (3) reconciles the
    /// optimistic state — recording the operation count on success, or
    /// clearing the pending hunks on failure. The whole operation is tracked
    /// as a pending op so status views can show in-flight state.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status message, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        // Key index writes by path set so the worker can relate them.
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush editor contents to disk before touching the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip hunks in each affected open
                            // buffer, remembering each diff state's operation
                            // count for reconciliation after the write.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile: on success, record the count as of the
                            // write; on failure, roll back the optimistic hunks.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5161
5162 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5163 let snapshot = self.snapshot.clone();
5164 let pending_ops = self.pending_ops.clone();
5165 let to_stage = cx.background_spawn(async move {
5166 snapshot
5167 .status()
5168 .filter_map(|entry| {
5169 if let Some(ops) =
5170 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5171 {
5172 if ops.staging() || ops.staged() {
5173 None
5174 } else {
5175 Some(entry.repo_path)
5176 }
5177 } else if entry.status.staging().is_fully_staged() {
5178 None
5179 } else {
5180 Some(entry.repo_path)
5181 }
5182 })
5183 .collect()
5184 });
5185
5186 cx.spawn(async move |this, cx| {
5187 let to_stage = to_stage.await;
5188 this.update(cx, |this, cx| {
5189 this.stage_or_unstage_entries(true, to_stage, cx)
5190 })?
5191 .await
5192 })
5193 }
5194
5195 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5196 let snapshot = self.snapshot.clone();
5197 let pending_ops = self.pending_ops.clone();
5198 let to_unstage = cx.background_spawn(async move {
5199 snapshot
5200 .status()
5201 .filter_map(|entry| {
5202 if let Some(ops) =
5203 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5204 {
5205 if !ops.staging() && !ops.staged() {
5206 None
5207 } else {
5208 Some(entry.repo_path)
5209 }
5210 } else if entry.status.staging().is_fully_unstaged() {
5211 None
5212 } else {
5213 Some(entry.repo_path)
5214 }
5215 })
5216 .collect()
5217 });
5218
5219 cx.spawn(async move |this, cx| {
5220 let to_unstage = to_unstage.await;
5221 this.update(cx, |this, cx| {
5222 this.stage_or_unstage_entries(false, to_unstage, cx)
5223 })?
5224 .await
5225 })
5226 }
5227
    /// Stashes every path that currently has a status entry.
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5233
    /// Stashes the given paths, locally or via RPC depending on the
    /// repository state.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` is the oneshot channel, second the job's own result.
            .await??;
            Ok(())
        })
    }
5270
    /// Pops the stash entry at `index` (or the latest entry when `None`),
    /// locally or via RPC depending on the repository state.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` is the oneshot channel, second the job's own result.
            .await??;
            Ok(())
        })
    }
5304
    /// Applies the stash entry at `index` (or the latest entry when `None`)
    /// without dropping it, locally or via RPC depending on the repository
    /// state.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` is the oneshot channel, second the job's own result.
            .await??;
            Ok(())
        })
    }
5338
5339 pub fn stash_drop(
5340 &mut self,
5341 index: Option<usize>,
5342 cx: &mut Context<Self>,
5343 ) -> oneshot::Receiver<anyhow::Result<()>> {
5344 let id = self.id;
5345 let updates_tx = self
5346 .git_store()
5347 .and_then(|git_store| match &git_store.read(cx).state {
5348 GitStoreState::Local { downstream, .. } => downstream
5349 .as_ref()
5350 .map(|downstream| downstream.updates_tx.clone()),
5351 _ => None,
5352 });
5353 let this = cx.weak_entity();
5354 self.send_job(None, move |git_repo, mut cx| async move {
5355 match git_repo {
5356 RepositoryState::Local(LocalRepositoryState {
5357 backend,
5358 environment,
5359 ..
5360 }) => {
5361 // TODO would be nice to not have to do this manually
5362 let result = backend.stash_drop(index, environment).await;
5363 if result.is_ok()
5364 && let Ok(stash_entries) = backend.stash_entries().await
5365 {
5366 let snapshot = this.update(&mut cx, |this, cx| {
5367 this.snapshot.stash_entries = stash_entries;
5368 cx.emit(RepositoryEvent::StashEntriesChanged);
5369 this.snapshot.clone()
5370 })?;
5371 if let Some(updates_tx) = updates_tx {
5372 updates_tx
5373 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5374 .ok();
5375 }
5376 }
5377
5378 result
5379 }
5380 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5381 client
5382 .request(proto::StashDrop {
5383 project_id: project_id.0,
5384 repository_id: id.to_proto(),
5385 stash_index: index.map(|i| i as u64),
5386 })
5387 .await
5388 .context("sending stash pop request")?;
5389 Ok(())
5390 }
5391 }
5392 })
5393 }
5394
5395 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5396 let id = self.id;
5397 self.send_job(
5398 Some(format!("git hook {}", hook.as_str()).into()),
5399 move |git_repo, _cx| async move {
5400 match git_repo {
5401 RepositoryState::Local(LocalRepositoryState {
5402 backend,
5403 environment,
5404 ..
5405 }) => backend.run_hook(hook, environment.clone()).await,
5406 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5407 client
5408 .request(proto::RunGitHook {
5409 project_id: project_id.0,
5410 repository_id: id.to_proto(),
5411 hook: hook.to_proto(),
5412 })
5413 .await?;
5414
5415 Ok(())
5416 }
5417 }
5418 },
5419 )
5420 }
5421
    /// Creates a commit with `message`, optionally overriding the author via
    /// `name_and_email`.
    ///
    /// The pre-commit hook is started immediately and its result is awaited
    /// inside the commit job, so a failing hook aborts the commit. For remote
    /// repositories the `askpass` delegate is registered under a fresh id for
    /// the duration of the request so the host can route askpass prompts back
    /// to this client.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Kick off the pre-commit hook now; its receiver is awaited inside
        // the commit job below, so the hook completes before the commit runs.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the hook failed or its result channel was canceled.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate when the request completes,
                    // whether it succeeded or not.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5476
5477 pub fn fetch(
5478 &mut self,
5479 fetch_options: FetchOptions,
5480 askpass: AskPassDelegate,
5481 _cx: &mut App,
5482 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5483 let askpass_delegates = self.askpass_delegates.clone();
5484 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5485 let id = self.id;
5486
5487 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
5488 match git_repo {
5489 RepositoryState::Local(LocalRepositoryState {
5490 backend,
5491 environment,
5492 ..
5493 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
5494 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5495 askpass_delegates.lock().insert(askpass_id, askpass);
5496 let _defer = util::defer(|| {
5497 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5498 debug_assert!(askpass_delegate.is_some());
5499 });
5500
5501 let response = client
5502 .request(proto::Fetch {
5503 project_id: project_id.0,
5504 repository_id: id.to_proto(),
5505 askpass_id,
5506 remote: fetch_options.to_proto(),
5507 })
5508 .await?;
5509
5510 Ok(RemoteCommandOutput {
5511 stdout: response.stdout,
5512 stderr: response.stderr,
5513 })
5514 }
5515 }
5516 })
5517 }
5518
5519 pub fn push(
5520 &mut self,
5521 branch: SharedString,
5522 remote_branch: SharedString,
5523 remote: SharedString,
5524 options: Option<PushOptions>,
5525 askpass: AskPassDelegate,
5526 cx: &mut Context<Self>,
5527 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5528 let askpass_delegates = self.askpass_delegates.clone();
5529 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5530 let id = self.id;
5531
5532 let args = options
5533 .map(|option| match option {
5534 PushOptions::SetUpstream => " --set-upstream",
5535 PushOptions::Force => " --force-with-lease",
5536 })
5537 .unwrap_or("");
5538
5539 let updates_tx = self
5540 .git_store()
5541 .and_then(|git_store| match &git_store.read(cx).state {
5542 GitStoreState::Local { downstream, .. } => downstream
5543 .as_ref()
5544 .map(|downstream| downstream.updates_tx.clone()),
5545 _ => None,
5546 });
5547
5548 let this = cx.weak_entity();
5549 self.send_job(
5550 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5551 move |git_repo, mut cx| async move {
5552 match git_repo {
5553 RepositoryState::Local(LocalRepositoryState {
5554 backend,
5555 environment,
5556 ..
5557 }) => {
5558 let result = backend
5559 .push(
5560 branch.to_string(),
5561 remote_branch.to_string(),
5562 remote.to_string(),
5563 options,
5564 askpass,
5565 environment.clone(),
5566 cx.clone(),
5567 )
5568 .await;
5569 // TODO would be nice to not have to do this manually
5570 if result.is_ok() {
5571 let branches = backend.branches().await?;
5572 let branch = branches.into_iter().find(|branch| branch.is_head);
5573 log::info!("head branch after scan is {branch:?}");
5574 let snapshot = this.update(&mut cx, |this, cx| {
5575 this.snapshot.branch = branch;
5576 cx.emit(RepositoryEvent::BranchChanged);
5577 this.snapshot.clone()
5578 })?;
5579 if let Some(updates_tx) = updates_tx {
5580 updates_tx
5581 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5582 .ok();
5583 }
5584 }
5585 result
5586 }
5587 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5588 askpass_delegates.lock().insert(askpass_id, askpass);
5589 let _defer = util::defer(|| {
5590 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5591 debug_assert!(askpass_delegate.is_some());
5592 });
5593 let response = client
5594 .request(proto::Push {
5595 project_id: project_id.0,
5596 repository_id: id.to_proto(),
5597 askpass_id,
5598 branch_name: branch.to_string(),
5599 remote_branch_name: remote_branch.to_string(),
5600 remote_name: remote.to_string(),
5601 options: options.map(|options| match options {
5602 PushOptions::Force => proto::push::PushOptions::Force,
5603 PushOptions::SetUpstream => {
5604 proto::push::PushOptions::SetUpstream
5605 }
5606 }
5607 as i32),
5608 })
5609 .await?;
5610
5611 Ok(RemoteCommandOutput {
5612 stdout: response.stdout,
5613 stderr: response.stderr,
5614 })
5615 }
5616 }
5617 },
5618 )
5619 }
5620
5621 pub fn pull(
5622 &mut self,
5623 branch: Option<SharedString>,
5624 remote: SharedString,
5625 rebase: bool,
5626 askpass: AskPassDelegate,
5627 _cx: &mut App,
5628 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5629 let askpass_delegates = self.askpass_delegates.clone();
5630 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5631 let id = self.id;
5632
5633 let mut status = "git pull".to_string();
5634 if rebase {
5635 status.push_str(" --rebase");
5636 }
5637 status.push_str(&format!(" {}", remote));
5638 if let Some(b) = &branch {
5639 status.push_str(&format!(" {}", b));
5640 }
5641
5642 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5643 match git_repo {
5644 RepositoryState::Local(LocalRepositoryState {
5645 backend,
5646 environment,
5647 ..
5648 }) => {
5649 backend
5650 .pull(
5651 branch.as_ref().map(|b| b.to_string()),
5652 remote.to_string(),
5653 rebase,
5654 askpass,
5655 environment.clone(),
5656 cx,
5657 )
5658 .await
5659 }
5660 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5661 askpass_delegates.lock().insert(askpass_id, askpass);
5662 let _defer = util::defer(|| {
5663 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5664 debug_assert!(askpass_delegate.is_some());
5665 });
5666 let response = client
5667 .request(proto::Pull {
5668 project_id: project_id.0,
5669 repository_id: id.to_proto(),
5670 askpass_id,
5671 rebase,
5672 branch_name: branch.as_ref().map(|b| b.to_string()),
5673 remote_name: remote.to_string(),
5674 })
5675 .await?;
5676
5677 Ok(RemoteCommandOutput {
5678 stdout: response.stdout,
5679 stderr: response.stderr,
5680 })
5681 }
5682 }
5683 })
5684 }
5685
    /// Queues a keyed job that writes `content` into the git index entry for
    /// `path` (removing the entry when `content` is `None`).
    ///
    /// Jobs are keyed by `GitJobKey::WriteIndex(path)` so that writes to the
    /// same path are serialized. After the write, if
    /// `hunk_staging_operation_count` was provided, it is recorded on the
    /// corresponding buffer's diff state as the count as-of this write.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Mirror the executable bit of the file on disk;
                        // missing files and stat errors are treated as
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Record the operation count on the buffer's diff state;
                    // the `?`s inside the closure silently bail out (Some(())
                    // vs None) when the buffer or diff state no longer exist.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5764
5765 pub fn create_remote(
5766 &mut self,
5767 remote_name: String,
5768 remote_url: String,
5769 ) -> oneshot::Receiver<Result<()>> {
5770 let id = self.id;
5771 self.send_job(
5772 Some(format!("git remote add {remote_name} {remote_url}").into()),
5773 move |repo, _cx| async move {
5774 match repo {
5775 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5776 backend.create_remote(remote_name, remote_url).await
5777 }
5778 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5779 client
5780 .request(proto::GitCreateRemote {
5781 project_id: project_id.0,
5782 repository_id: id.to_proto(),
5783 remote_name,
5784 remote_url,
5785 })
5786 .await?;
5787
5788 Ok(())
5789 }
5790 }
5791 },
5792 )
5793 }
5794
5795 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5796 let id = self.id;
5797 self.send_job(
5798 Some(format!("git remove remote {remote_name}").into()),
5799 move |repo, _cx| async move {
5800 match repo {
5801 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5802 backend.remove_remote(remote_name).await
5803 }
5804 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5805 client
5806 .request(proto::GitRemoveRemote {
5807 project_id: project_id.0,
5808 repository_id: id.to_proto(),
5809 remote_name,
5810 })
5811 .await?;
5812
5813 Ok(())
5814 }
5815 }
5816 },
5817 )
5818 }
5819
5820 pub fn get_remotes(
5821 &mut self,
5822 branch_name: Option<String>,
5823 is_push: bool,
5824 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5825 let id = self.id;
5826 self.send_job(None, move |repo, _cx| async move {
5827 match repo {
5828 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5829 let remote = if let Some(branch_name) = branch_name {
5830 if is_push {
5831 backend.get_push_remote(branch_name).await?
5832 } else {
5833 backend.get_branch_remote(branch_name).await?
5834 }
5835 } else {
5836 None
5837 };
5838
5839 match remote {
5840 Some(remote) => Ok(vec![remote]),
5841 None => backend.get_all_remotes().await,
5842 }
5843 }
5844 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5845 let response = client
5846 .request(proto::GetRemotes {
5847 project_id: project_id.0,
5848 repository_id: id.to_proto(),
5849 branch_name,
5850 is_push,
5851 })
5852 .await?;
5853
5854 let remotes = response
5855 .remotes
5856 .into_iter()
5857 .map(|remotes| Remote {
5858 name: remotes.name.into(),
5859 })
5860 .collect();
5861
5862 Ok(remotes)
5863 }
5864 }
5865 })
5866 }
5867
5868 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5869 let id = self.id;
5870 self.send_job(None, move |repo, _| async move {
5871 match repo {
5872 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5873 backend.branches().await
5874 }
5875 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5876 let response = client
5877 .request(proto::GitGetBranches {
5878 project_id: project_id.0,
5879 repository_id: id.to_proto(),
5880 })
5881 .await?;
5882
5883 let branches = response
5884 .branches
5885 .into_iter()
5886 .map(|branch| proto_to_branch(&branch))
5887 .collect();
5888
5889 Ok(branches)
5890 }
5891 }
5892 })
5893 }
5894
5895 /// If this is a linked worktree (*NOT* the main checkout of a repository),
5896 /// returns the pathed for the linked worktree.
5897 ///
5898 /// Returns None if this is the main checkout.
5899 pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
5900 if self.work_directory_abs_path != self.original_repo_abs_path {
5901 Some(&self.work_directory_abs_path)
5902 } else {
5903 None
5904 }
5905 }
5906
5907 pub fn path_for_new_linked_worktree(
5908 &self,
5909 branch_name: &str,
5910 worktree_directory_setting: &str,
5911 ) -> Result<PathBuf> {
5912 let original_repo = self.original_repo_abs_path.clone();
5913 let project_name = original_repo
5914 .file_name()
5915 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5916 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5917 Ok(directory.join(branch_name).join(project_name))
5918 }
5919
5920 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5921 let id = self.id;
5922 self.send_job(None, move |repo, _| async move {
5923 match repo {
5924 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5925 backend.worktrees().await
5926 }
5927 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5928 let response = client
5929 .request(proto::GitGetWorktrees {
5930 project_id: project_id.0,
5931 repository_id: id.to_proto(),
5932 })
5933 .await?;
5934
5935 let worktrees = response
5936 .worktrees
5937 .into_iter()
5938 .map(|worktree| proto_to_worktree(&worktree))
5939 .collect();
5940
5941 Ok(worktrees)
5942 }
5943 }
5944 })
5945 }
5946
5947 pub fn create_worktree(
5948 &mut self,
5949 branch_name: String,
5950 path: PathBuf,
5951 commit: Option<String>,
5952 ) -> oneshot::Receiver<Result<()>> {
5953 let id = self.id;
5954 self.send_job(
5955 Some(format!("git worktree add: {}", branch_name).into()),
5956 move |repo, _cx| async move {
5957 match repo {
5958 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5959 backend.create_worktree(branch_name, path, commit).await
5960 }
5961 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5962 client
5963 .request(proto::GitCreateWorktree {
5964 project_id: project_id.0,
5965 repository_id: id.to_proto(),
5966 name: branch_name,
5967 directory: path.to_string_lossy().to_string(),
5968 commit,
5969 })
5970 .await?;
5971
5972 Ok(())
5973 }
5974 }
5975 },
5976 )
5977 }
5978
5979 pub fn create_cancellable_worktree(
5980 &mut self,
5981 branch_name: String,
5982 path: PathBuf,
5983 commit: Option<String>,
5984 ) -> (oneshot::Receiver<Result<()>>, CancellationToken) {
5985 let id = self.id;
5986 self.send_cancellable_job(
5987 Some(format!("git worktree add: {}", branch_name).into()),
5988 move |repo, _cx| async move {
5989 match repo {
5990 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5991 backend.create_worktree(branch_name, path, commit).await
5992 }
5993 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5994 client
5995 .request(proto::GitCreateWorktree {
5996 project_id: project_id.0,
5997 repository_id: id.to_proto(),
5998 name: branch_name,
5999 directory: path.to_string_lossy().to_string(),
6000 commit,
6001 })
6002 .await?;
6003 Ok(())
6004 }
6005 }
6006 },
6007 )
6008 }
6009
6010 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
6011 let id = self.id;
6012 self.send_job(
6013 Some(format!("git worktree remove: {}", path.display()).into()),
6014 move |repo, _cx| async move {
6015 match repo {
6016 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6017 backend.remove_worktree(path, force).await
6018 }
6019 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6020 client
6021 .request(proto::GitRemoveWorktree {
6022 project_id: project_id.0,
6023 repository_id: id.to_proto(),
6024 path: path.to_string_lossy().to_string(),
6025 force,
6026 })
6027 .await?;
6028
6029 Ok(())
6030 }
6031 }
6032 },
6033 )
6034 }
6035
6036 pub fn rename_worktree(
6037 &mut self,
6038 old_path: PathBuf,
6039 new_path: PathBuf,
6040 ) -> oneshot::Receiver<Result<()>> {
6041 let id = self.id;
6042 self.send_job(
6043 Some(format!("git worktree move: {}", old_path.display()).into()),
6044 move |repo, _cx| async move {
6045 match repo {
6046 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6047 backend.rename_worktree(old_path, new_path).await
6048 }
6049 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6050 client
6051 .request(proto::GitRenameWorktree {
6052 project_id: project_id.0,
6053 repository_id: id.to_proto(),
6054 old_path: old_path.to_string_lossy().to_string(),
6055 new_path: new_path.to_string_lossy().to_string(),
6056 })
6057 .await?;
6058
6059 Ok(())
6060 }
6061 }
6062 },
6063 )
6064 }
6065
6066 pub fn default_branch(
6067 &mut self,
6068 include_remote_name: bool,
6069 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
6070 let id = self.id;
6071 self.send_job(None, move |repo, _| async move {
6072 match repo {
6073 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6074 backend.default_branch(include_remote_name).await
6075 }
6076 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6077 let response = client
6078 .request(proto::GetDefaultBranch {
6079 project_id: project_id.0,
6080 repository_id: id.to_proto(),
6081 })
6082 .await?;
6083
6084 anyhow::Ok(response.branch.map(SharedString::from))
6085 }
6086 }
6087 })
6088 }
6089
    /// Computes a tree-level diff (per-file statuses, no hunk content) of the
    /// requested `diff_type`.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Convert proto entries back into `TreeDiffStatus`
                    // values. Entries with a missing/unparseable oid or an
                    // invalid path are logged and skipped (via `log_err`)
                    // rather than failing the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6149
6150 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6151 let id = self.id;
6152 self.send_job(None, move |repo, _cx| async move {
6153 match repo {
6154 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6155 backend.diff(diff_type).await
6156 }
6157 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6158 let (proto_diff_type, merge_base_ref) = match &diff_type {
6159 DiffType::HeadToIndex => {
6160 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6161 }
6162 DiffType::HeadToWorktree => {
6163 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6164 }
6165 DiffType::MergeBase { base_ref } => (
6166 proto::git_diff::DiffType::MergeBase.into(),
6167 Some(base_ref.to_string()),
6168 ),
6169 };
6170 let response = client
6171 .request(proto::GitDiff {
6172 project_id: project_id.0,
6173 repository_id: id.to_proto(),
6174 diff_type: proto_diff_type,
6175 merge_base_ref,
6176 })
6177 .await?;
6178
6179 Ok(response.diff)
6180 }
6181 }
6182 })
6183 }
6184
    /// Creates a new branch named `branch_name` and switches to it,
    /// optionally starting from `base_branch`.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `base_branch` is not forwarded in this
                    // request, so remote projects appear to always branch
                    // from the current HEAD — confirm whether the proto
                    // message supports a base ref.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6215
6216 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6217 let id = self.id;
6218 self.send_job(
6219 Some(format!("git switch {branch_name}").into()),
6220 move |repo, _cx| async move {
6221 match repo {
6222 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6223 backend.change_branch(branch_name).await
6224 }
6225 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6226 client
6227 .request(proto::GitChangeBranch {
6228 project_id: project_id.0,
6229 repository_id: id.to_proto(),
6230 branch_name,
6231 })
6232 .await?;
6233
6234 Ok(())
6235 }
6236 }
6237 },
6238 )
6239 }
6240
6241 pub fn delete_branch(
6242 &mut self,
6243 is_remote: bool,
6244 branch_name: String,
6245 ) -> oneshot::Receiver<Result<()>> {
6246 let id = self.id;
6247 self.send_job(
6248 Some(
6249 format!(
6250 "git branch {} {}",
6251 if is_remote { "-dr" } else { "-d" },
6252 branch_name
6253 )
6254 .into(),
6255 ),
6256 move |repo, _cx| async move {
6257 match repo {
6258 RepositoryState::Local(state) => {
6259 state.backend.delete_branch(is_remote, branch_name).await
6260 }
6261 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6262 client
6263 .request(proto::GitDeleteBranch {
6264 project_id: project_id.0,
6265 repository_id: id.to_proto(),
6266 is_remote,
6267 branch_name,
6268 })
6269 .await?;
6270
6271 Ok(())
6272 }
6273 }
6274 },
6275 )
6276 }
6277
6278 pub fn rename_branch(
6279 &mut self,
6280 branch: String,
6281 new_name: String,
6282 ) -> oneshot::Receiver<Result<()>> {
6283 let id = self.id;
6284 self.send_job(
6285 Some(format!("git branch -m {branch} {new_name}").into()),
6286 move |repo, _cx| async move {
6287 match repo {
6288 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6289 backend.rename_branch(branch, new_name).await
6290 }
6291 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6292 client
6293 .request(proto::GitRenameBranch {
6294 project_id: project_id.0,
6295 repository_id: id.to_proto(),
6296 branch,
6297 new_name,
6298 })
6299 .await?;
6300
6301 Ok(())
6302 }
6303 }
6304 },
6305 )
6306 }
6307
6308 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6309 let id = self.id;
6310 self.send_job(None, move |repo, _cx| async move {
6311 match repo {
6312 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6313 backend.check_for_pushed_commit().await
6314 }
6315 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6316 let response = client
6317 .request(proto::CheckForPushedCommits {
6318 project_id: project_id.0,
6319 repository_id: id.to_proto(),
6320 })
6321 .await?;
6322
6323 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6324
6325 Ok(branches)
6326 }
6327 }
6328 })
6329 }
6330
6331 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6332 self.send_job(None, |repo, _cx| async move {
6333 match repo {
6334 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6335 backend.checkpoint().await
6336 }
6337 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6338 }
6339 })
6340 }
6341
6342 pub fn restore_checkpoint(
6343 &mut self,
6344 checkpoint: GitRepositoryCheckpoint,
6345 ) -> oneshot::Receiver<Result<()>> {
6346 self.send_job(None, move |repo, _cx| async move {
6347 match repo {
6348 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6349 backend.restore_checkpoint(checkpoint).await
6350 }
6351 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6352 }
6353 })
6354 }
6355
    /// Applies a `proto::UpdateRepository` message from the upstream (host)
    /// project to this replica's snapshot, emitting the matching
    /// `RepositoryEvent`s for each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch and head commit: emit `BranchChanged` if either differs.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries: entries that fail to deserialize are dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        // Linked worktrees.
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // File statuses: apply removals and insertions as a single batch of
        // sum-tree edits; un-parseable paths/statuses are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only adopt the scan id once the final message of a batch arrives.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6439
6440 pub fn compare_checkpoints(
6441 &mut self,
6442 left: GitRepositoryCheckpoint,
6443 right: GitRepositoryCheckpoint,
6444 ) -> oneshot::Receiver<Result<bool>> {
6445 self.send_job(None, move |repo, _cx| async move {
6446 match repo {
6447 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6448 backend.compare_checkpoints(left, right).await
6449 }
6450 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6451 }
6452 })
6453 }
6454
6455 pub fn diff_checkpoints(
6456 &mut self,
6457 base_checkpoint: GitRepositoryCheckpoint,
6458 target_checkpoint: GitRepositoryCheckpoint,
6459 ) -> oneshot::Receiver<Result<String>> {
6460 self.send_job(None, move |repo, _cx| async move {
6461 match repo {
6462 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6463 backend
6464 .diff_checkpoints(base_checkpoint, target_checkpoint)
6465 .await
6466 }
6467 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6468 }
6469 })
6470 }
6471
    /// Rebuilds `pending_ops`, retaining only entries that still contain at
    /// least one running op, and emits `PendingOpsChanged` when the set
    /// changed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Keep only the still-running ops for each path; paths with no
        // running ops are dropped entirely.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): this emits the *previous* ops (the new value is
            // only assigned below) — confirm whether listeners expect the old
            // set here, or whether this should carry `updated` instead.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6497
    /// Schedules a full git status scan on the worker queue.
    ///
    /// Keyed with [`GitJobKey::ReloadGitState`]: the worker skips a keyed job
    /// when a newer job with the same key is already queued, so repeated
    /// schedule requests coalesce into the most recent one.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been dropped before this job
                // got to run; that is not an error.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans only make sense against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // Drop finished/errored pending ops now that the scan reflects
                // their outcome.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                // Forward the refreshed snapshot to downstream (collab)
                // subscribers, if any.
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6529
    /// Spawns the long-lived worker loop that executes git jobs for a local
    /// repository, one at a time, in enqueue order.
    ///
    /// Returns the sender used to submit [`GitWorkerMessage`]s. The loop
    /// exits once every sender is dropped and the queue has drained.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitWorkerMessage> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitWorkerMessage>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // accepting any work.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs: VecDeque<GitJob> = VecDeque::new();
            // Ids of recently finished jobs, kept so cancellation requests
            // for them can be answered.
            let mut completed_ids: HashSet<JobId> = HashSet::new();
            loop {
                // Drain every message that is already queued, without
                // blocking.
                while let Ok(Some(msg)) = job_rx.try_next() {
                    match msg {
                        GitWorkerMessage::Job(job) => jobs.push_back(job),
                        GitWorkerMessage::Cancel {
                            id,
                            cleanup,
                            result_tx,
                        } => {
                            let before = jobs.len();
                            jobs.retain(|j| j.id != id);
                            // NOTE(review): the `else if` and `else` arms are
                            // identical, so the `completed_ids` lookup has no
                            // effect; a distinct outcome for an unknown id may
                            // have been intended — confirm.
                            let outcome = if jobs.len() < before {
                                CancelOutcome::RemovedFromQueue
                            } else if completed_ids.contains(&id) {
                                CancelOutcome::AlreadyFinished
                            } else {
                                CancelOutcome::AlreadyFinished
                            };
                            result_tx.send(outcome).ok();
                            // Cleanup runs only when the job never started.
                            if let Some(f) = cleanup {
                                if matches!(outcome, CancelOutcome::RemovedFromQueue) {
                                    f(state.clone(), cx).await;
                                }
                            }
                        }
                    }
                }

                if let Some(job) = jobs.pop_front() {
                    // Debounce keyed jobs: when a newer job with the same key
                    // is already queued, skip this stale one.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    let running_job_id = job.id;
                    let task = (job.job)(state.clone(), cx);
                    let mut task = std::pin::pin!(task);

                    // Run the job while continuing to service the channel so
                    // cancellation can interrupt it.
                    loop {
                        match futures::future::select(task, job_rx.next()).await {
                            futures::future::Either::Left(((), _)) => {
                                completed_ids.insert(running_job_id);
                                // Bound the completed-id set. NOTE(review):
                                // `HashSet` iteration order is unspecified, so
                                // this evicts an arbitrary half of the
                                // entries, not necessarily the oldest.
                                if completed_ids.len() > 256 {
                                    let to_remove: Vec<_> =
                                        completed_ids.iter().copied().take(128).collect();
                                    for id in to_remove {
                                        completed_ids.remove(&id);
                                    }
                                }
                                break;
                            }
                            futures::future::Either::Right((Some(msg), ongoing_task)) => {
                                match msg {
                                    GitWorkerMessage::Job(j) => {
                                        jobs.push_back(j);
                                        task = ongoing_task;
                                    }
                                    GitWorkerMessage::Cancel {
                                        id,
                                        cleanup,
                                        result_tx,
                                    } => {
                                        if id == running_job_id {
                                            // Dropping the job's future here
                                            // is what cancels the running job.
                                            let _ = ongoing_task;
                                            if let Some(f) = cleanup {
                                                f(state.clone(), cx).await;
                                            }
                                            result_tx.send(CancelOutcome::KilledRunning).ok();
                                            break;
                                        } else {
                                            let before = jobs.len();
                                            jobs.retain(|j| j.id != id);
                                            // NOTE(review): same duplicated
                                            // arms as above. Also unlike the
                                            // drain loop, `cleanup` is never
                                            // invoked here even when the job
                                            // was removed from the queue —
                                            // confirm whether intentional.
                                            let outcome = if jobs.len() < before {
                                                CancelOutcome::RemovedFromQueue
                                            } else if completed_ids.contains(&id) {
                                                CancelOutcome::AlreadyFinished
                                            } else {
                                                CancelOutcome::AlreadyFinished
                                            };
                                            result_tx.send(outcome).ok();
                                            task = ongoing_task;
                                        }
                                    }
                                }
                            }
                            futures::future::Either::Right((None, ongoing_task)) => {
                                // Channel closed: let the in-flight job finish
                                // before shutting down.
                                ongoing_task.await;
                                break;
                            }
                        }
                    }
                } else if let Some(msg) = job_rx.next().await {
                    // Queue empty: block until the next message arrives.
                    match msg {
                        GitWorkerMessage::Job(job) => jobs.push_back(job),
                        GitWorkerMessage::Cancel {
                            id: _,
                            cleanup: _,
                            result_tx,
                        } => {
                            result_tx.send(CancelOutcome::AlreadyFinished).ok();
                        }
                    }
                } else {
                    // All senders dropped and nothing queued: shut down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6664
    /// Spawns the long-lived worker loop that executes git jobs for a remote
    /// (collab) repository, one at a time, in enqueue order.
    ///
    /// Mirrors `spawn_local_git_worker`, minus backend initialization and
    /// hosting-provider registration. Returns the sender used to submit
    /// [`GitWorkerMessage`]s; the loop exits once every sender is dropped and
    /// the queue has drained.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitWorkerMessage> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitWorkerMessage>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs: VecDeque<GitJob> = VecDeque::new();
            // Ids of recently finished jobs, kept so cancellation requests
            // for them can be answered.
            let mut completed_ids: HashSet<JobId> = HashSet::new();
            loop {
                // Drain every message that is already queued, without
                // blocking.
                while let Ok(Some(msg)) = job_rx.try_next() {
                    match msg {
                        GitWorkerMessage::Job(job) => jobs.push_back(job),
                        GitWorkerMessage::Cancel {
                            id,
                            cleanup,
                            result_tx,
                        } => {
                            let before = jobs.len();
                            jobs.retain(|j| j.id != id);
                            // NOTE(review): the `else if` and `else` arms are
                            // identical, so the `completed_ids` lookup has no
                            // effect; a distinct outcome for an unknown id may
                            // have been intended — confirm.
                            let outcome = if jobs.len() < before {
                                CancelOutcome::RemovedFromQueue
                            } else if completed_ids.contains(&id) {
                                CancelOutcome::AlreadyFinished
                            } else {
                                CancelOutcome::AlreadyFinished
                            };
                            result_tx.send(outcome).ok();
                            // Cleanup runs only when the job never started.
                            if let Some(f) = cleanup {
                                if matches!(outcome, CancelOutcome::RemovedFromQueue) {
                                    f(state.clone(), cx).await;
                                }
                            }
                        }
                    }
                }

                if let Some(job) = jobs.pop_front() {
                    // Debounce keyed jobs: when a newer job with the same key
                    // is already queued, skip this stale one.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    let running_job_id = job.id;
                    let task = (job.job)(state.clone(), cx);
                    let mut task = std::pin::pin!(task);

                    // Run the job while continuing to service the channel so
                    // cancellation can interrupt it.
                    loop {
                        match futures::future::select(task, job_rx.next()).await {
                            futures::future::Either::Left(((), _)) => {
                                completed_ids.insert(running_job_id);
                                // Bound the completed-id set. NOTE(review):
                                // `HashSet` iteration order is unspecified, so
                                // this evicts an arbitrary half of the
                                // entries, not necessarily the oldest.
                                if completed_ids.len() > 256 {
                                    let to_remove: Vec<_> =
                                        completed_ids.iter().copied().take(128).collect();
                                    for id in to_remove {
                                        completed_ids.remove(&id);
                                    }
                                }
                                break;
                            }
                            futures::future::Either::Right((Some(msg), ongoing_task)) => {
                                match msg {
                                    GitWorkerMessage::Job(j) => {
                                        jobs.push_back(j);
                                        task = ongoing_task;
                                    }
                                    GitWorkerMessage::Cancel {
                                        id,
                                        cleanup,
                                        result_tx,
                                    } => {
                                        if id == running_job_id {
                                            // Dropping the job's future here
                                            // is what cancels the running job.
                                            let _ = ongoing_task;
                                            if let Some(f) = cleanup {
                                                f(state.clone(), cx).await;
                                            }
                                            result_tx.send(CancelOutcome::KilledRunning).ok();
                                            break;
                                        } else {
                                            let before = jobs.len();
                                            jobs.retain(|j| j.id != id);
                                            // NOTE(review): same duplicated
                                            // arms as above. Also unlike the
                                            // drain loop, `cleanup` is never
                                            // invoked here even when the job
                                            // was removed from the queue —
                                            // confirm whether intentional.
                                            let outcome = if jobs.len() < before {
                                                CancelOutcome::RemovedFromQueue
                                            } else if completed_ids.contains(&id) {
                                                CancelOutcome::AlreadyFinished
                                            } else {
                                                CancelOutcome::AlreadyFinished
                                            };
                                            result_tx.send(outcome).ok();
                                            task = ongoing_task;
                                        }
                                    }
                                }
                            }
                            futures::future::Either::Right((None, ongoing_task)) => {
                                // Channel closed: let the in-flight job finish
                                // before shutting down.
                                ongoing_task.await;
                                break;
                            }
                        }
                    }
                } else if let Some(msg) = job_rx.next().await {
                    // Queue empty: block until the next message arrives.
                    match msg {
                        GitWorkerMessage::Job(job) => jobs.push_back(job),
                        GitWorkerMessage::Cancel {
                            id: _,
                            cleanup: _,
                            result_tx,
                        } => {
                            result_tx.send(CancelOutcome::AlreadyFinished).ok();
                        }
                    }
                } else {
                    // All senders dropped and nothing queued: shut down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6789
6790 fn load_staged_text(
6791 &mut self,
6792 buffer_id: BufferId,
6793 repo_path: RepoPath,
6794 cx: &App,
6795 ) -> Task<Result<Option<String>>> {
6796 let rx = self.send_job(None, move |state, _| async move {
6797 match state {
6798 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6799 anyhow::Ok(backend.load_index_text(repo_path).await)
6800 }
6801 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6802 let response = client
6803 .request(proto::OpenUnstagedDiff {
6804 project_id: project_id.to_proto(),
6805 buffer_id: buffer_id.to_proto(),
6806 })
6807 .await?;
6808 Ok(response.staged_text)
6809 }
6810 }
6811 });
6812 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6813 }
6814
    /// Loads both the HEAD ("committed") and index ("staged") text for
    /// `repo_path`, collapsed into a single [`DiffBasesChange`].
    ///
    /// For remote repositories this issues an `OpenUncommittedDiff` request
    /// and relies on the server's reported mode to decide whether index and
    /// HEAD match.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // When index and HEAD agree, report a single shared base
                    // instead of carrying the same text twice.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6860
6861 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6862 let repository_id = self.snapshot.id;
6863 let rx = self.send_job(None, move |state, _| async move {
6864 match state {
6865 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6866 backend.load_blob_content(oid).await
6867 }
6868 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6869 let response = client
6870 .request(proto::GetBlobContent {
6871 project_id: project_id.to_proto(),
6872 repository_id: repository_id.0,
6873 oid: oid.to_string(),
6874 })
6875 .await?;
6876 Ok(response.content)
6877 }
6878 }
6879 });
6880 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6881 }
6882
    /// Records `paths` as needing a status refresh and schedules a batched
    /// refresh job on the worker queue.
    ///
    /// Keyed with [`GitJobKey::RefreshStatuses`], so repeated calls while a
    /// refresh is already queued coalesce into a single scan over the union
    /// of all accumulated paths.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of every path batch accumulated since the
                // last refresh ran.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                // Diff stats require a HEAD commit to diff against.
                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // A BTreeSet both dedupes the batches and yields the
                        // paths in sorted order, which the forward-seeking
                        // cursors below depend on.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Insert an entry only when its status or diff stat
                        // actually differs from the previous snapshot.
                        // NOTE(review): this assumes `statuses.entries` is
                        // also sorted by path, matching the cursor's forward
                        // seeks — confirm.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Any requested path absent from the fresh status
                        // output is now clean; remove it from the snapshot if
                        // it was previously present.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                // Apply the computed edits on the foreground thread and
                // notify subscribers of what changed.
                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6995
    /// Returns info about one of the currently running git jobs, if any.
    ///
    /// NOTE: `active_jobs` is a map, so when several jobs are in flight the
    /// one returned is arbitrary (map iteration order).
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }
7000
    /// Enqueues a no-op job and returns its completion receiver, which
    /// resolves once all previously enqueued git jobs have finished.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
7004
    /// Runs `f` while tracking a pending op (with the given `git_status`) for
    /// each of `paths`, so per-path in-flight git activity can be observed.
    ///
    /// When `f` resolves, every op created here is marked `Finished`,
    /// `Skipped` (when the job was canceled), or `Error`, and the returned
    /// task yields `f`'s result (cancellation is reported as success).
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A `Canceled` error means the job was dropped, not that it
            // failed; record it as skipped and report success.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Write the final status back onto each op created above.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7044
7045 fn new_pending_ops_for_paths(
7046 &mut self,
7047 paths: Vec<RepoPath>,
7048 git_status: pending_op::GitStatus,
7049 ) -> Vec<(PendingOpId, RepoPath)> {
7050 let mut edits = Vec::with_capacity(paths.len());
7051 let mut ids = Vec::with_capacity(paths.len());
7052 for path in paths {
7053 let mut ops = self
7054 .pending_ops
7055 .get(&PathKey(path.as_ref().clone()), ())
7056 .cloned()
7057 .unwrap_or_else(|| PendingOps::new(&path));
7058 let id = ops.max_id() + 1;
7059 ops.ops.push(PendingOp {
7060 id,
7061 git_status,
7062 job_status: pending_op::JobStatus::Running,
7063 });
7064 edits.push(sum_tree::Edit::Insert(ops));
7065 ids.push((id, path));
7066 }
7067 self.pending_ops.edit(edits, ());
7068 ids
7069 }
7070 pub fn default_remote_url(&self) -> Option<String> {
7071 self.remote_upstream_url
7072 .clone()
7073 .or(self.remote_origin_url.clone())
7074 }
7075}
7076
7077/// If `path` is a git linked worktree checkout, resolves it to the main
7078/// repository's working directory path. Returns `None` if `path` is a normal
7079/// repository, not a git repo, or if resolution fails.
7080///
7081/// Resolution works by:
7082/// 1. Reading the `.git` file to get the `gitdir:` pointer
7083/// 2. Following that to the worktree-specific git directory
7084/// 3. Reading the `commondir` file to find the shared `.git` directory
7085/// 4. Deriving the main repo's working directory from the common dir
7086pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7087 let dot_git = path.join(".git");
7088 let metadata = fs.metadata(&dot_git).await.ok()??;
7089 if metadata.is_dir {
7090 return None; // Normal repo, not a linked worktree
7091 }
7092 // It's a .git file — parse the gitdir: pointer
7093 let content = fs.load(&dot_git).await.ok()?;
7094 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7095 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7096 // Read commondir to find the main .git directory
7097 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7098 let common_dir = fs
7099 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7100 .await
7101 .ok()?;
7102 Some(git::repository::original_repo_path_from_common_dir(
7103 &common_dir,
7104 ))
7105}
7106
7107/// Validates that the resolved worktree directory is acceptable:
7108/// - The setting must not be an absolute path.
7109/// - The resolved path must be either a subdirectory of the working
7110/// directory or a subdirectory of its parent (i.e., a sibling).
7111///
7112/// Returns `Ok(resolved_path)` or an error with a user-facing message.
7113pub fn worktrees_directory_for_repo(
7114 original_repo_abs_path: &Path,
7115 worktree_directory_setting: &str,
7116) -> Result<PathBuf> {
7117 // Check the original setting before trimming, since a path like "///"
7118 // is absolute but becomes "" after stripping trailing separators.
7119 // Also check for leading `/` or `\` explicitly, because on Windows
7120 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
7121 // would slip through even though it's clearly not a relative path.
7122 if Path::new(worktree_directory_setting).is_absolute()
7123 || worktree_directory_setting.starts_with('/')
7124 || worktree_directory_setting.starts_with('\\')
7125 {
7126 anyhow::bail!(
7127 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
7128 );
7129 }
7130
7131 if worktree_directory_setting.is_empty() {
7132 anyhow::bail!("git.worktree_directory must not be empty");
7133 }
7134
7135 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
7136 if trimmed == ".." {
7137 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
7138 }
7139
7140 let joined = original_repo_abs_path.join(trimmed);
7141 let resolved = util::normalize_path(&joined);
7142 let resolved = if resolved.starts_with(original_repo_abs_path) {
7143 resolved
7144 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
7145 resolved.join(repo_dir_name)
7146 } else {
7147 resolved
7148 };
7149
7150 let parent = original_repo_abs_path
7151 .parent()
7152 .unwrap_or(original_repo_abs_path);
7153
7154 if !resolved.starts_with(parent) {
7155 anyhow::bail!(
7156 "git.worktree_directory resolved to {resolved:?}, which is outside \
7157 the project root and its parent directory. It must resolve to a \
7158 subdirectory of {original_repo_abs_path:?} or a sibling of it."
7159 );
7160 }
7161
7162 Ok(resolved)
7163}
7164
7165/// Returns a short name for a linked worktree suitable for UI display
7166///
7167/// Uses the main worktree path to come up with a short name that disambiguates
7168/// the linked worktree from the main worktree.
7169pub fn linked_worktree_short_name(
7170 main_worktree_path: &Path,
7171 linked_worktree_path: &Path,
7172) -> Option<SharedString> {
7173 if main_worktree_path == linked_worktree_path {
7174 return None;
7175 }
7176
7177 let project_name = main_worktree_path.file_name()?.to_str()?;
7178 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7179 let name = if directory_name != project_name {
7180 directory_name.to_string()
7181 } else {
7182 linked_worktree_path
7183 .parent()?
7184 .file_name()?
7185 .to_str()?
7186 .to_string()
7187 };
7188 Some(name.into())
7189}
7190
/// Builds a permalink to the upstream source of a file that lives inside a
/// local Cargo registry checkout.
///
/// Published crates carry a `.cargo_vcs_info.json` recording the git SHA and
/// the crate's path within its source repository; combined with the
/// `package.repository` field of `Cargo.toml`, that suffices to reconstruct a
/// permalink for `path` at the given `selection` range.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal views of `.cargo_vcs_info.json` and `Cargo.toml`; any extra
    // fields are ignored during deserialization.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to locate the crate root, identified by the
    // presence of `.cargo_vcs_info.json`.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file's path from the registry checkout onto the crate's
    // location inside its source repository.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7241
7242fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7243 let Some(blame) = blame else {
7244 return proto::BlameBufferResponse {
7245 blame_response: None,
7246 };
7247 };
7248
7249 let entries = blame
7250 .entries
7251 .into_iter()
7252 .map(|entry| proto::BlameEntry {
7253 sha: entry.sha.as_bytes().into(),
7254 start_line: entry.range.start,
7255 end_line: entry.range.end,
7256 original_line_number: entry.original_line_number,
7257 author: entry.author,
7258 author_mail: entry.author_mail,
7259 author_time: entry.author_time,
7260 author_tz: entry.author_tz,
7261 committer: entry.committer_name,
7262 committer_mail: entry.committer_email,
7263 committer_time: entry.committer_time,
7264 committer_tz: entry.committer_tz,
7265 summary: entry.summary,
7266 previous: entry.previous,
7267 filename: entry.filename,
7268 })
7269 .collect::<Vec<_>>();
7270
7271 let messages = blame
7272 .messages
7273 .into_iter()
7274 .map(|(oid, message)| proto::CommitMessage {
7275 oid: oid.as_bytes().into(),
7276 message,
7277 })
7278 .collect::<Vec<_>>();
7279
7280 proto::BlameBufferResponse {
7281 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7282 }
7283}
7284
7285fn deserialize_blame_buffer_response(
7286 response: proto::BlameBufferResponse,
7287) -> Option<git::blame::Blame> {
7288 let response = response.blame_response?;
7289 let entries = response
7290 .entries
7291 .into_iter()
7292 .filter_map(|entry| {
7293 Some(git::blame::BlameEntry {
7294 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7295 range: entry.start_line..entry.end_line,
7296 original_line_number: entry.original_line_number,
7297 committer_name: entry.committer,
7298 committer_time: entry.committer_time,
7299 committer_tz: entry.committer_tz,
7300 committer_email: entry.committer_mail,
7301 author: entry.author,
7302 author_mail: entry.author_mail,
7303 author_time: entry.author_time,
7304 author_tz: entry.author_tz,
7305 summary: entry.summary,
7306 previous: entry.previous,
7307 filename: entry.filename,
7308 })
7309 })
7310 .collect::<Vec<_>>();
7311
7312 let messages = response
7313 .messages
7314 .into_iter()
7315 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7316 .collect::<HashMap<_, _>>();
7317
7318 Some(Blame { entries, messages })
7319}
7320
7321fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7322 proto::Branch {
7323 is_head: branch.is_head,
7324 ref_name: branch.ref_name.to_string(),
7325 unix_timestamp: branch
7326 .most_recent_commit
7327 .as_ref()
7328 .map(|commit| commit.commit_timestamp as u64),
7329 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7330 ref_name: upstream.ref_name.to_string(),
7331 tracking: upstream
7332 .tracking
7333 .status()
7334 .map(|upstream| proto::UpstreamTracking {
7335 ahead: upstream.ahead as u64,
7336 behind: upstream.behind as u64,
7337 }),
7338 }),
7339 most_recent_commit: branch
7340 .most_recent_commit
7341 .as_ref()
7342 .map(|commit| proto::CommitSummary {
7343 sha: commit.sha.to_string(),
7344 subject: commit.subject.to_string(),
7345 commit_timestamp: commit.commit_timestamp,
7346 author_name: commit.author_name.to_string(),
7347 }),
7348 }
7349}
7350
7351fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7352 proto::Worktree {
7353 path: worktree.path.to_string_lossy().to_string(),
7354 ref_name: worktree
7355 .ref_name
7356 .as_ref()
7357 .map(|s| s.to_string())
7358 .unwrap_or_default(),
7359 sha: worktree.sha.to_string(),
7360 }
7361}
7362
7363fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7364 git::repository::Worktree {
7365 path: PathBuf::from(proto.path.clone()),
7366 ref_name: Some(SharedString::from(&proto.ref_name)),
7367 sha: proto.sha.clone().into(),
7368 }
7369}
7370
/// Converts a protobuf branch back into a [`git::repository::Branch`].
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    // An absent tracking payload maps to the `Gone` state.
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                // NOTE(review): `proto::CommitSummary` carries no parent
                // info, so `has_parent` is assumed true — confirm this is
                // safe for root commits received over the wire.
                has_parent: true,
            }
        }),
    }
}
7402
7403fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7404 proto::GitCommitDetails {
7405 sha: commit.sha.to_string(),
7406 message: commit.message.to_string(),
7407 commit_timestamp: commit.commit_timestamp,
7408 author_email: commit.author_email.to_string(),
7409 author_name: commit.author_name.to_string(),
7410 }
7411}
7412
7413fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7414 CommitDetails {
7415 sha: proto.sha.clone().into(),
7416 message: proto.message.clone().into(),
7417 commit_timestamp: proto.commit_timestamp,
7418 author_email: proto.author_email.clone().into(),
7419 author_name: proto.author_name.clone().into(),
7420 }
7421}
7422
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        // A full scan supersedes any batched per-path refresh that was
        // pending.
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD to full commit details; a repository with no commits yet
    // yields `None`.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Keep only *linked* worktrees: the main working directory is excluded.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First foreground update: branch/head/remotes/worktrees, so the UI can
    // react before the slower status scan below finishes.
    let snapshot = this.update(cx, |this, cx| {
        let branch_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        // NOTE(review): `scan_id` is advanced here and again in the final
        // update below, so a single scan bumps it by 2 — confirm intended.
        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if branch_changed {
            cx.emit(RepositoryEvent::BranchChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                // Diff stats need a HEAD commit to diff against; substitute
                // an empty result when there is none.
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                // The "." path scopes the status query to the whole repo.
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    // Collect conflicted paths as a side effect while building the status
    // tree; they feed the merge-details update below.
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Second foreground update: file statuses, merge/conflict state, and
    // stash entries.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7585
7586fn status_from_proto(
7587 simple_status: i32,
7588 status: Option<proto::GitFileStatus>,
7589) -> anyhow::Result<FileStatus> {
7590 use proto::git_file_status::Variant;
7591
7592 let Some(variant) = status.and_then(|status| status.variant) else {
7593 let code = proto::GitStatus::from_i32(simple_status)
7594 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7595 let result = match code {
7596 proto::GitStatus::Added => TrackedStatus {
7597 worktree_status: StatusCode::Added,
7598 index_status: StatusCode::Unmodified,
7599 }
7600 .into(),
7601 proto::GitStatus::Modified => TrackedStatus {
7602 worktree_status: StatusCode::Modified,
7603 index_status: StatusCode::Unmodified,
7604 }
7605 .into(),
7606 proto::GitStatus::Conflict => UnmergedStatus {
7607 first_head: UnmergedStatusCode::Updated,
7608 second_head: UnmergedStatusCode::Updated,
7609 }
7610 .into(),
7611 proto::GitStatus::Deleted => TrackedStatus {
7612 worktree_status: StatusCode::Deleted,
7613 index_status: StatusCode::Unmodified,
7614 }
7615 .into(),
7616 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7617 };
7618 return Ok(result);
7619 };
7620
7621 let result = match variant {
7622 Variant::Untracked(_) => FileStatus::Untracked,
7623 Variant::Ignored(_) => FileStatus::Ignored,
7624 Variant::Unmerged(unmerged) => {
7625 let [first_head, second_head] =
7626 [unmerged.first_head, unmerged.second_head].map(|head| {
7627 let code = proto::GitStatus::from_i32(head)
7628 .with_context(|| format!("Invalid git status code: {head}"))?;
7629 let result = match code {
7630 proto::GitStatus::Added => UnmergedStatusCode::Added,
7631 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7632 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7633 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7634 };
7635 Ok(result)
7636 });
7637 let [first_head, second_head] = [first_head?, second_head?];
7638 UnmergedStatus {
7639 first_head,
7640 second_head,
7641 }
7642 .into()
7643 }
7644 Variant::Tracked(tracked) => {
7645 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7646 .map(|status| {
7647 let code = proto::GitStatus::from_i32(status)
7648 .with_context(|| format!("Invalid git status code: {status}"))?;
7649 let result = match code {
7650 proto::GitStatus::Modified => StatusCode::Modified,
7651 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7652 proto::GitStatus::Added => StatusCode::Added,
7653 proto::GitStatus::Deleted => StatusCode::Deleted,
7654 proto::GitStatus::Renamed => StatusCode::Renamed,
7655 proto::GitStatus::Copied => StatusCode::Copied,
7656 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7657 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7658 };
7659 Ok(result)
7660 });
7661 let [index_status, worktree_status] = [index_status?, worktree_status?];
7662 TrackedStatus {
7663 index_status,
7664 worktree_status,
7665 }
7666 .into()
7667 }
7668 };
7669 Ok(result)
7670}
7671
7672fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7673 use proto::git_file_status::{Tracked, Unmerged, Variant};
7674
7675 let variant = match status {
7676 FileStatus::Untracked => Variant::Untracked(Default::default()),
7677 FileStatus::Ignored => Variant::Ignored(Default::default()),
7678 FileStatus::Unmerged(UnmergedStatus {
7679 first_head,
7680 second_head,
7681 }) => Variant::Unmerged(Unmerged {
7682 first_head: unmerged_status_to_proto(first_head),
7683 second_head: unmerged_status_to_proto(second_head),
7684 }),
7685 FileStatus::Tracked(TrackedStatus {
7686 index_status,
7687 worktree_status,
7688 }) => Variant::Tracked(Tracked {
7689 index_status: tracked_status_to_proto(index_status),
7690 worktree_status: tracked_status_to_proto(worktree_status),
7691 }),
7692 };
7693 proto::GitFileStatus {
7694 variant: Some(variant),
7695 }
7696}
7697
7698fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7699 match code {
7700 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7701 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7702 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7703 }
7704}
7705
7706fn tracked_status_to_proto(code: StatusCode) -> i32 {
7707 match code {
7708 StatusCode::Added => proto::GitStatus::Added as _,
7709 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7710 StatusCode::Modified => proto::GitStatus::Modified as _,
7711 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7712 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7713 StatusCode::Copied => proto::GitStatus::Copied as _,
7714 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7715 }
7716}