1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, CreateWorktreeTarget,
36 DiffType, FetchOptions, GitRepository, GitRepositoryCheckpoint, GraphCommitData,
37 InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote, RemoteCommandOutput,
38 RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for git state in a project: tracks every repository found in
/// the project's worktrees, the per-buffer diff/conflict state, and the
/// plumbing for sharing that state with remote collaborators.
pub struct GitStore {
    // Local vs. remote mode, plus any downstream (shared) connection.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Worktrees associated with each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    // The repository currently considered active, if any.
    active_repo_id: Option<RepositoryId>,
    #[allow(clippy::type_complexity)]
    // In-flight diff loads, shared so concurrent requests join the same task.
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (diffs, conflict sets, base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diff entities that have been shared with each remote peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Strong handles to the diffs shared with a single peer for one buffer,
/// keeping them alive while the peer is connected.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: weak handles to the diffs and conflict set opened for
/// a buffer, plus the base texts those diffs are computed against.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against an arbitrary commit; a `None` key is a diff with no base.
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders notified after conflict markers are reparsed — NOTE(review):
    // presumably resolved by the reparse task; confirm in its implementation.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Watch channel broadcasting whether a diff recalculation is in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    // Cached base texts for `oid_diffs`, keyed by commit OID.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or head) changed, carrying the
/// new contents (`None` when there is no base text).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the head text changed.
    SetHead(Option<String>),
    /// Index and head changed to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and head changed to the same value.
    SetBoth(Option<String>),
}
155
/// Identifies which kind of diff is being loaded or cached for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer contents vs. the index (staged text).
    Unstaged,
    /// Buffer contents vs. the committed text.
    Uncommitted,
    /// Buffer contents vs. an arbitrary commit (`None` for no base).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store runs against local on-disk repositories or proxies a
/// remote project's git state.
enum GitStoreState {
    Local {
        /// Source of fresh `RepositoryId`s for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        /// Connection to the host that owns the real repositories.
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this (remote) project is in turn re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// Messages sent to the background task that relays repository state to a
/// downstream client (see `GitStore::shared`).
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
181
/// Connection state for a shared local project: the client to send updates to,
/// and the channel feeding the background task that serializes them.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    /// Drains `updates_tx` and forwards (incremental) updates downstream.
    _task: Task<Result<()>>,
}
188
/// A checkpoint across every repository in the store, keyed by each
/// repository's working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// The git status of a single path within a repository, plus optional
/// added/deleted line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summarizes this entry for the status sum-tree: the path serves as the
    /// ordering key and the status rolls up into a `GitSummary`.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}
254
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed by their repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Unique identifier for a repository within a project.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
265
/// State of an in-progress merge: the merge heads recorded for each
/// conflicted path, and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}
271
/// Cache entry for a commit's graph data: either still loading or loaded.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// A point-in-time copy of a repository's observable state: path statuses,
/// branches, head commit, merge state, stash, and linked git worktrees.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Git status for each path, ordered and summarized in a sum-tree.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    /// The current branch, if any.
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Git worktrees linked to this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
299
/// Identifier for a job running against a repository.
type JobId = u64;

/// Metadata about a running git job: when it started and a description.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
307
/// Background handler that serves commit-data requests for the git graph.
struct GraphCommitDataHandler {
    _task: Task<()>,
    /// Channel on which commit OIDs are requested from the handler task.
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph commit-data handler.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
318
/// Commit data fetched for a git graph view, populated incrementally.
pub struct InitialGitGraphData {
    /// Task performing the fetch; kept alive for the fetch's duration.
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index of each commit within `commit_data`, keyed by OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of graph commit data plus loading/error state.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
331
/// Live handle to a single git repository: the latest snapshot plus the job
/// queue, caches, and background handlers used to interact with it.
pub struct Repository {
    this: WeakEntity<Self>,
    /// The most recently computed snapshot of the repository's state.
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue of git operations to run against this repository's backend.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Metadata for jobs currently in flight, keyed by job id.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Shared task resolving to this repository's backend (local or remote).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    /// Commit data fetched for the git graph, per (source, order) view.
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    /// Cache of per-commit graph data, keyed by commit OID.
    commit_data: HashMap<Oid, CommitDataState>,
}
351
/// Expose the current snapshot's fields directly on `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
359
/// Backend state for a repository on the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    /// Shell environment resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
366
impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`, first resolving the shell
    /// environment for `work_directory_abs_path` so the backend can use the
    /// same `PATH` (and thus the same `git` binary) the user's shell would.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        // Fall back to an empty environment rather than failing outright.
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found on the project's PATH; fall back to
                    // whatever is on this process's own PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
409
/// Backend state for a repository that lives on a remote host.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// A repository's backend: either a local on-disk repository or a remote
/// project's client connection.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events emitted while loading data for the git graph view.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of known commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
428
/// Events emitted by a `Repository` when parts of its snapshot change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// A git-graph loading event for the given (source, order) view.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Emitted when a repository's set of running jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
442
/// Events emitted by the `GitStore` itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// A write to the git index failed; carries the underlying error.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
454
// Repositories emit both snapshot-change and job-activity events; the store
// emits its own aggregate events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
458
/// A unit of work to run against a repository's backend.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): key presumably lets equivalent queued jobs be coalesced
    // (keys are only comparable) — confirm in the job-runner code.
    key: Option<GitJobKey>,
}

/// Classifies jobs that may be identified by kind in the queue.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
471
472impl GitStore {
473 pub fn local(
474 worktree_store: &Entity<WorktreeStore>,
475 buffer_store: Entity<BufferStore>,
476 environment: Entity<ProjectEnvironment>,
477 fs: Arc<dyn Fs>,
478 cx: &mut Context<Self>,
479 ) -> Self {
480 Self::new(
481 worktree_store.clone(),
482 buffer_store,
483 GitStoreState::Local {
484 next_repository_id: Arc::new(AtomicU64::new(1)),
485 downstream: None,
486 project_environment: environment,
487 fs,
488 },
489 cx,
490 )
491 }
492
493 pub fn remote(
494 worktree_store: &Entity<WorktreeStore>,
495 buffer_store: Entity<BufferStore>,
496 upstream_client: AnyProtoClient,
497 project_id: u64,
498 cx: &mut Context<Self>,
499 ) -> Self {
500 Self::new(
501 worktree_store.clone(),
502 buffer_store,
503 GitStoreState::Remote {
504 upstream_client,
505 upstream_project_id: project_id,
506 downstream: None,
507 },
508 cx,
509 )
510 }
511
    /// Shared constructor for [`GitStore::local`] and [`GitStore::remote`].
    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        // Track worktree and buffer changes so repositories and per-buffer
        // git state stay in sync.
        let mut _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        // Also observe worktree-trust changes when the global store exists.
        if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
            _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
        }

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            worktree_ids: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }
540
    /// Registers all git-related RPC message handlers on the given client.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
    }
593
594 pub fn is_local(&self) -> bool {
595 matches!(self.state, GitStoreState::Local { .. })
596 }
597
598 fn set_active_repo_id(&mut self, repo_id: RepositoryId, cx: &mut Context<Self>) {
599 if self.active_repo_id != Some(repo_id) {
600 self.active_repo_id = Some(repo_id);
601 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
602 }
603 }
604
605 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
606 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
607 self.set_active_repo_id(repo.read(cx).id, cx);
608 }
609 }
610
611 pub fn set_active_repo_for_worktree(
612 &mut self,
613 worktree_id: WorktreeId,
614 cx: &mut Context<Self>,
615 ) {
616 let Some(worktree) = self
617 .worktree_store
618 .read(cx)
619 .worktree_for_id(worktree_id, cx)
620 else {
621 return;
622 };
623 let worktree_abs_path = worktree.read(cx).abs_path();
624 let Some(repo_id) = self
625 .repositories
626 .values()
627 .filter(|repo| {
628 let repo_path = &repo.read(cx).work_directory_abs_path;
629 *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref())
630 })
631 .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
632 .map(|repo| repo.read(cx).id)
633 else {
634 return;
635 };
636
637 self.set_active_repo_id(repo_id, cx);
638 }
639
    /// Begins sharing this store's repositories with a downstream client.
    ///
    /// For a remote store, the current snapshots are forwarded immediately.
    /// For a local store, a background task is spawned that diffs successive
    /// snapshots and streams incremental updates to the client.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send each repository's full initial state, split into
                // protocol-sized chunks.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the relay with the current snapshot of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        // Known repository: send only the delta
                                        // since the last snapshot we relayed.
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send its full state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The relay stopped (send failed or channel closed):
                        // clear the downstream connection.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
720
721 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
722 match &mut self.state {
723 GitStoreState::Local {
724 downstream: downstream_client,
725 ..
726 } => {
727 downstream_client.take();
728 }
729 GitStoreState::Remote {
730 downstream: downstream_client,
731 ..
732 } => {
733 downstream_client.take();
734 }
735 }
736 self.shared_diffs.clear();
737 }
738
    /// Drops all diff entities shared with the given peer (e.g. after they
    /// disconnect).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
742
743 pub fn active_repository(&self) -> Option<Entity<Repository>> {
744 self.active_repo_id
745 .as_ref()
746 .map(|id| self.repositories[id].clone())
747 }
748
    /// Returns the unstaged diff (buffer contents vs. index) for `buffer`,
    /// creating and loading it if necessary. Concurrent calls for the same
    /// buffer share a single loading task.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: the diff already exists. If a recalculation is in
        // flight, wait for it before handing the diff out.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Slow path: load the staged text and build the diff, deduplicating
        // concurrent requests via `loading_diffs`.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
803
    /// Returns a diff of `buffer` against an arbitrary commit (`oid`), or with
    /// no base when `oid` is `None`. The resulting diff's secondary diff is
    /// the buffer's unstaged diff.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: this diff already exists; wait out any in-flight
        // recalculation before returning it.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Join an in-flight load for the same (buffer, oid), if any.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository (absent for `None`).
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff serves as this diff's secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and a weak handle to the diff.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
904
    /// Returns the uncommitted diff (buffer contents vs. committed text) for
    /// `buffer`, creating and loading it if necessary. Concurrent calls for
    /// the same buffer share a single loading task.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: diff already exists; wait out any in-flight recalculation.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Slow path: load the committed text, deduplicating concurrent
        // requests via `loading_diffs`.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
957
    /// Shared tail of `open_unstaged_diff` and `open_uncommitted_diff`:
    /// installs the loaded base text(s) into the buffer's git state, wires up
    /// the new `BufferDiff` (including a secondary unstaged diff for
    /// uncommitted diffs), and waits for the initial recalculation.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // On load failure, clear the loading entry and propagate the error.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff uses the unstaged diff as its
                        // secondary; create one if the buffer has none yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Apply the new base texts, then wait for the resulting
                // recalculation (if one was started) before returning.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1032
1033 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1034 let diff_state = self.diffs.get(&buffer_id)?;
1035 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1036 }
1037
1038 pub fn get_uncommitted_diff(
1039 &self,
1040 buffer_id: BufferId,
1041 cx: &App,
1042 ) -> Option<Entity<BufferDiff>> {
1043 let diff_state = self.diffs.get(&buffer_id)?;
1044 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1045 }
1046
1047 pub fn get_diff_since_oid(
1048 &self,
1049 buffer_id: BufferId,
1050 oid: Option<git::Oid>,
1051 cx: &App,
1052 ) -> Option<Entity<BufferDiff>> {
1053 let diff_state = self.diffs.get(&buffer_id)?;
1054 diff_state.read(cx).oid_diff(oid)
1055 }
1056
1057 pub fn open_conflict_set(
1058 &mut self,
1059 buffer: Entity<Buffer>,
1060 cx: &mut Context<Self>,
1061 ) -> Entity<ConflictSet> {
1062 log::debug!("open conflict set");
1063 let buffer_id = buffer.read(cx).remote_id();
1064
1065 if let Some(git_state) = self.diffs.get(&buffer_id)
1066 && let Some(conflict_set) = git_state
1067 .read(cx)
1068 .conflict_set
1069 .as_ref()
1070 .and_then(|weak| weak.upgrade())
1071 {
1072 let conflict_set = conflict_set;
1073 let buffer_snapshot = buffer.read(cx).text_snapshot();
1074
1075 git_state.update(cx, |state, cx| {
1076 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1077 });
1078
1079 return conflict_set;
1080 }
1081
1082 let is_unmerged = self
1083 .repository_and_path_for_buffer_id(buffer_id, cx)
1084 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1085 let git_store = cx.weak_entity();
1086 let buffer_git_state = self
1087 .diffs
1088 .entry(buffer_id)
1089 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1090 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1091
1092 self._subscriptions
1093 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1094 cx.emit(GitStoreEvent::ConflictsUpdated);
1095 }));
1096
1097 buffer_git_state.update(cx, |state, cx| {
1098 state.conflict_set = Some(conflict_set.downgrade());
1099 let buffer_snapshot = buffer.read(cx).text_snapshot();
1100 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1101 });
1102
1103 conflict_set
1104 }
1105
1106 pub fn project_path_git_status(
1107 &self,
1108 project_path: &ProjectPath,
1109 cx: &App,
1110 ) -> Option<FileStatus> {
1111 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1112 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1113 }
1114
    /// Captures a checkpoint of every tracked repository's current state.
    ///
    /// Checkpoint creation runs concurrently across repositories; the task
    /// resolves once all of them finish, failing if any single one fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `map(|checkpoint| checkpoint?)` flattens the nested result
                // of the checkpoint future into a single `Result`.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Pair each work directory with its checkpoint; both vectors
                // were pushed in the same loop, so their order matches.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1135
    /// Restores every repository named in `checkpoint` to its recorded state.
    ///
    /// Checkpoints whose work directory no longer matches a tracked
    /// repository are silently skipped. The returned task resolves once all
    /// restorations finish, failing if any single one fails.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index live repositories by work directory so checkpoint entries can
        // be matched back to them.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // The inner `?` flattens the nested result of the restore.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1161
1162 /// Compares two checkpoints, returning true if they are equal.
1163 pub fn compare_checkpoints(
1164 &self,
1165 left: GitStoreCheckpoint,
1166 mut right: GitStoreCheckpoint,
1167 cx: &mut App,
1168 ) -> Task<Result<bool>> {
1169 let repositories_by_work_dir_abs_path = self
1170 .repositories
1171 .values()
1172 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1173 .collect::<HashMap<_, _>>();
1174
1175 let mut tasks = Vec::new();
1176 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1177 if let Some(right_checkpoint) = right
1178 .checkpoints_by_work_dir_abs_path
1179 .remove(&work_dir_abs_path)
1180 {
1181 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1182 {
1183 let compare = repository.update(cx, |repository, _| {
1184 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1185 });
1186
1187 tasks.push(async move { compare.await? });
1188 }
1189 } else {
1190 return Task::ready(Ok(false));
1191 }
1192 }
1193 cx.background_spawn(async move {
1194 Ok(future::try_join_all(tasks)
1195 .await?
1196 .into_iter()
1197 .all(|result| result))
1198 })
1199 }
1200
    /// Blames a buffer.
    ///
    /// When `version` is given, the blame is computed against the buffer's
    /// content at that version; otherwise against its current content.
    /// Fails if the buffer does not belong to a tracked git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the content to blame before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                // Local repository: run the blame against the git backend.
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                // Remote repository: ask the host to compute the blame.
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1247
1248 pub fn file_history(
1249 &self,
1250 repo: &Entity<Repository>,
1251 path: RepoPath,
1252 cx: &mut App,
1253 ) -> Task<Result<git::repository::FileHistory>> {
1254 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1255
1256 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1257 }
1258
1259 pub fn file_history_paginated(
1260 &self,
1261 repo: &Entity<Repository>,
1262 path: RepoPath,
1263 skip: usize,
1264 limit: Option<usize>,
1265 cx: &mut App,
1266 ) -> Task<Result<git::repository::FileHistory>> {
1267 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1268
1269 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1270 }
1271
    /// Builds a permalink URL to the selected lines of the buffer on its git
    /// hosting provider.
    ///
    /// For Rust files outside any repository, falls back to constructing a
    /// permalink from Cargo registry metadata.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the remote the current branch's upstream points at,
        // defaulting to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Local repository: resolve the remote URL and HEAD SHA,
                    // then let the hosting provider build the permalink.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Remote repository: delegate permalink construction to
                    // the host and parse the returned URL.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1356
1357 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1358 match &self.state {
1359 GitStoreState::Local {
1360 downstream: downstream_client,
1361 ..
1362 } => downstream_client
1363 .as_ref()
1364 .map(|state| (state.client.clone(), state.project_id)),
1365 GitStoreState::Remote {
1366 downstream: downstream_client,
1367 ..
1368 } => downstream_client.clone(),
1369 }
1370 }
1371
1372 fn upstream_client(&self) -> Option<AnyProtoClient> {
1373 match &self.state {
1374 GitStoreState::Local { .. } => None,
1375 GitStoreState::Remote {
1376 upstream_client, ..
1377 } => Some(upstream_client.clone()),
1378 }
1379 }
1380
    /// Responds to worktree changes on a local project: schedules git scans,
    /// registers and removes repositories, and keeps the active-repository
    /// selection valid. No-op for remote projects.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by the repository containing
                    // them, then notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            // NOTE(review): the result of this `update` call is
                            // discarded — verify that ignoring failures (e.g.
                            // during app teardown) is intentional.
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Drop this worktree from every repository's worktree set;
                // repositories left with no worktrees are removed entirely.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Propagates a repository update to the git state of every buffer that
    /// belongs to that repository (refreshing conflict tracking), then
    /// re-emits the event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only touch buffers whose file lives in the repository that
            // changed.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse markers when the conflicted state
                        // actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1526
    /// Re-broadcasts a repository's `JobsUpdated` event as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1530
    /// Updates our list of repositories and schedules git scans in response
    /// to a notification from a worktree.
    ///
    /// Existing repositories (matched by old or new work directory) are
    /// re-scanned, or dropped when they lose their last worktree; brand-new
    /// repositories are created, subscribed to, and announced downstream.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update against an existing repository by either its
            // old or its new work directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repository still exists (possibly moved): record the
                    // worktree association and refresh its state.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // The work directory is gone from this worktree; queue the
                    // repo for removal once no worktree references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A repository we haven't seen before: create it.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                // NOTE(review): Release ordering on a pure id counter looks
                // stronger than necessary — confirm whether Relaxed would do.
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1643
1644 fn on_trusted_worktrees_event(
1645 &mut self,
1646 _: Entity<TrustedWorktreesStore>,
1647 event: &TrustedWorktreesEvent,
1648 cx: &mut Context<Self>,
1649 ) {
1650 if !matches!(self.state, GitStoreState::Local { .. }) {
1651 return;
1652 }
1653
1654 let (is_trusted, event_paths) = match event {
1655 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1656 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1657 };
1658
1659 for (repo_id, worktree_ids) in &self.worktree_ids {
1660 if worktree_ids
1661 .iter()
1662 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1663 {
1664 if let Some(repo) = self.repositories.get(repo_id) {
1665 let repository_state = repo.read(cx).repository_state.clone();
1666 cx.background_spawn(async move {
1667 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1668 state.backend.set_trusted(is_trusted);
1669 }
1670 })
1671 .detach();
1672 }
1673 }
1674 }
1675 }
1676
    /// Keeps per-buffer git state in sync with buffer lifecycle events:
    /// language changes, shared-buffer teardown, buffer drops, and file path
    /// changes (which may move a buffer into or out of a repository).
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Track language changes so the buffer's diff state can react.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop both local and per-peer shared diff state.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Best-effort: failures are logged, not surfaced.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1749
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all of the
    /// resulting work has completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` yields an `Option`; nothing is
                    // awaited when it returns `None`.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1775
    /// Reacts to hunks being staged or unstaged in a buffer diff by writing
    /// the new index text to the repository. On failure, pending hunk state
    /// is cleared and an `IndexWriteError` event is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Record this staging operation; the incremented count is
                // passed along to the index-write job below.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            // The index write failed: roll back pending hunk
                            // state and notify listeners of the error.
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1815
1816 fn local_worktree_git_repos_changed(
1817 &mut self,
1818 worktree: Entity<Worktree>,
1819 changed_repos: &UpdatedGitRepositoriesSet,
1820 cx: &mut Context<Self>,
1821 ) {
1822 log::debug!("local worktree repos changed");
1823 debug_assert!(worktree.read(cx).is_local());
1824
1825 for repository in self.repositories.values() {
1826 repository.update(cx, |repository, cx| {
1827 let repo_abs_path = &repository.work_directory_abs_path;
1828 if changed_repos.iter().any(|update| {
1829 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1830 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1831 }) {
1832 repository.reload_buffer_diff_bases(cx);
1833 }
1834 });
1835 }
1836 }
1837
    /// All repositories tracked by this store, keyed by their id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1841
1842 /// Returns the original (main) repository working directory for the given worktree.
1843 /// For normal checkouts this equals the worktree's own path; for linked
1844 /// worktrees it points back to the original repo.
1845 pub fn original_repo_path_for_worktree(
1846 &self,
1847 worktree_id: WorktreeId,
1848 cx: &App,
1849 ) -> Option<Arc<Path>> {
1850 self.active_repo_id
1851 .iter()
1852 .chain(self.worktree_ids.keys())
1853 .find(|repo_id| {
1854 self.worktree_ids
1855 .get(repo_id)
1856 .is_some_and(|ids| ids.contains(&worktree_id))
1857 })
1858 .and_then(|repo_id| self.repositories.get(repo_id))
1859 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1860 }
1861
1862 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1863 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1864 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1865 Some(status.status)
1866 }
1867
1868 pub fn repository_and_path_for_buffer_id(
1869 &self,
1870 buffer_id: BufferId,
1871 cx: &App,
1872 ) -> Option<(Entity<Repository>, RepoPath)> {
1873 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1874 let project_path = buffer.read(cx).project_path(cx)?;
1875 self.repository_and_path_for_project_path(&project_path, cx)
1876 }
1877
1878 pub fn repository_and_path_for_project_path(
1879 &self,
1880 path: &ProjectPath,
1881 cx: &App,
1882 ) -> Option<(Entity<Repository>, RepoPath)> {
1883 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1884 self.repositories
1885 .values()
1886 .filter_map(|repo| {
1887 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1888 Some((repo.clone(), repo_path))
1889 })
1890 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1891 }
1892
1893 pub fn git_init(
1894 &self,
1895 path: Arc<Path>,
1896 fallback_branch_name: String,
1897 cx: &App,
1898 ) -> Task<Result<()>> {
1899 match &self.state {
1900 GitStoreState::Local { fs, .. } => {
1901 let fs = fs.clone();
1902 cx.background_executor()
1903 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1904 }
1905 GitStoreState::Remote {
1906 upstream_client,
1907 upstream_project_id: project_id,
1908 ..
1909 } => {
1910 let client = upstream_client.clone();
1911 let project_id = *project_id;
1912 cx.background_executor().spawn(async move {
1913 client
1914 .request(proto::GitInit {
1915 project_id: project_id,
1916 abs_path: path.to_string_lossy().into_owned(),
1917 fallback_branch_name,
1918 })
1919 .await?;
1920 Ok(())
1921 })
1922 }
1923 }
1924 }
1925
1926 pub fn git_clone(
1927 &self,
1928 repo: String,
1929 path: impl Into<Arc<std::path::Path>>,
1930 cx: &App,
1931 ) -> Task<Result<()>> {
1932 let path = path.into();
1933 match &self.state {
1934 GitStoreState::Local { fs, .. } => {
1935 let fs = fs.clone();
1936 cx.background_executor()
1937 .spawn(async move { fs.git_clone(&repo, &path).await })
1938 }
1939 GitStoreState::Remote {
1940 upstream_client,
1941 upstream_project_id,
1942 ..
1943 } => {
1944 if upstream_client.is_via_collab() {
1945 return Task::ready(Err(anyhow!(
1946 "Git Clone isn't supported for project guests"
1947 )));
1948 }
1949 let request = upstream_client.request(proto::GitClone {
1950 project_id: *upstream_project_id,
1951 abs_path: path.to_string_lossy().into_owned(),
1952 remote_repo: repo,
1953 });
1954
1955 cx.background_spawn(async move {
1956 let result = request.await?;
1957
1958 match result.success {
1959 true => Ok(()),
1960 false => Err(anyhow!("Git Clone failed")),
1961 }
1962 })
1963 }
1964 }
1965 }
1966
    /// Handles an `UpdateRepository` message from upstream: creates the
    /// remote repository entity on first sight, applies the update to it, and
    /// re-forwards the message to any downstream clients.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Lazily create the repository entity the first time this id is
            // seen; the subscription is stashed outside the closure so it can
            // be registered on `self` afterwards.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward the update to downstream clients under our own
            // project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
2022
2023 async fn handle_remove_repository(
2024 this: Entity<Self>,
2025 envelope: TypedEnvelope<proto::RemoveRepository>,
2026 mut cx: AsyncApp,
2027 ) -> Result<()> {
2028 this.update(&mut cx, |this, cx| {
2029 let mut update = envelope.payload;
2030 let id = RepositoryId::from_proto(update.id);
2031 this.repositories.remove(&id);
2032 if let Some((client, project_id)) = this.downstream_client() {
2033 update.project_id = project_id.to_proto();
2034 client.send(update).log_err();
2035 }
2036 if this.active_repo_id == Some(id) {
2037 this.active_repo_id = None;
2038 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
2039 }
2040 cx.emit(GitStoreEvent::RepositoryRemoved(id));
2041 });
2042 Ok(())
2043 }
2044
2045 async fn handle_git_init(
2046 this: Entity<Self>,
2047 envelope: TypedEnvelope<proto::GitInit>,
2048 cx: AsyncApp,
2049 ) -> Result<proto::Ack> {
2050 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2051 let name = envelope.payload.fallback_branch_name;
2052 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2053 .await?;
2054
2055 Ok(proto::Ack {})
2056 }
2057
2058 async fn handle_git_clone(
2059 this: Entity<Self>,
2060 envelope: TypedEnvelope<proto::GitClone>,
2061 cx: AsyncApp,
2062 ) -> Result<proto::GitCloneResponse> {
2063 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2064 let repo_name = envelope.payload.remote_repo;
2065 let result = cx
2066 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2067 .await;
2068
2069 Ok(proto::GitCloneResponse {
2070 success: result.is_ok(),
2071 })
2072 }
2073
    /// Handles a `Fetch` request from a downstream client, wiring up a remote
    /// askpass delegate so authentication prompts reach the requester.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Route credential prompts back to the requesting client.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2103
2104 async fn handle_push(
2105 this: Entity<Self>,
2106 envelope: TypedEnvelope<proto::Push>,
2107 mut cx: AsyncApp,
2108 ) -> Result<proto::RemoteMessageResponse> {
2109 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2110 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2111
2112 let askpass_id = envelope.payload.askpass_id;
2113 let askpass = make_remote_delegate(
2114 this,
2115 envelope.payload.project_id,
2116 repository_id,
2117 askpass_id,
2118 &mut cx,
2119 );
2120
2121 let options = envelope
2122 .payload
2123 .options
2124 .as_ref()
2125 .map(|_| match envelope.payload.options() {
2126 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
2127 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
2128 });
2129
2130 let branch_name = envelope.payload.branch_name.into();
2131 let remote_branch_name = envelope.payload.remote_branch_name.into();
2132 let remote_name = envelope.payload.remote_name.into();
2133
2134 let remote_output = repository_handle
2135 .update(&mut cx, |repository_handle, cx| {
2136 repository_handle.push(
2137 branch_name,
2138 remote_branch_name,
2139 remote_name,
2140 options,
2141 askpass,
2142 cx,
2143 )
2144 })
2145 .await??;
2146 Ok(proto::RemoteMessageResponse {
2147 stdout: remote_output.stdout,
2148 stderr: remote_output.stderr,
2149 })
2150 }
2151
2152 async fn handle_pull(
2153 this: Entity<Self>,
2154 envelope: TypedEnvelope<proto::Pull>,
2155 mut cx: AsyncApp,
2156 ) -> Result<proto::RemoteMessageResponse> {
2157 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2158 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2159 let askpass_id = envelope.payload.askpass_id;
2160 let askpass = make_remote_delegate(
2161 this,
2162 envelope.payload.project_id,
2163 repository_id,
2164 askpass_id,
2165 &mut cx,
2166 );
2167
2168 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2169 let remote_name = envelope.payload.remote_name.into();
2170 let rebase = envelope.payload.rebase;
2171
2172 let remote_message = repository_handle
2173 .update(&mut cx, |repository_handle, cx| {
2174 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2175 })
2176 .await??;
2177
2178 Ok(proto::RemoteMessageResponse {
2179 stdout: remote_message.stdout,
2180 stderr: remote_message.stderr,
2181 })
2182 }
2183
2184 async fn handle_stage(
2185 this: Entity<Self>,
2186 envelope: TypedEnvelope<proto::Stage>,
2187 mut cx: AsyncApp,
2188 ) -> Result<proto::Ack> {
2189 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2190 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2191
2192 let entries = envelope
2193 .payload
2194 .paths
2195 .into_iter()
2196 .map(|path| RepoPath::new(&path))
2197 .collect::<Result<Vec<_>>>()?;
2198
2199 repository_handle
2200 .update(&mut cx, |repository_handle, cx| {
2201 repository_handle.stage_entries(entries, cx)
2202 })
2203 .await?;
2204 Ok(proto::Ack {})
2205 }
2206
2207 async fn handle_unstage(
2208 this: Entity<Self>,
2209 envelope: TypedEnvelope<proto::Unstage>,
2210 mut cx: AsyncApp,
2211 ) -> Result<proto::Ack> {
2212 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2213 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2214
2215 let entries = envelope
2216 .payload
2217 .paths
2218 .into_iter()
2219 .map(|path| RepoPath::new(&path))
2220 .collect::<Result<Vec<_>>>()?;
2221
2222 repository_handle
2223 .update(&mut cx, |repository_handle, cx| {
2224 repository_handle.unstage_entries(entries, cx)
2225 })
2226 .await?;
2227
2228 Ok(proto::Ack {})
2229 }
2230
2231 async fn handle_stash(
2232 this: Entity<Self>,
2233 envelope: TypedEnvelope<proto::Stash>,
2234 mut cx: AsyncApp,
2235 ) -> Result<proto::Ack> {
2236 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2237 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2238
2239 let entries = envelope
2240 .payload
2241 .paths
2242 .into_iter()
2243 .map(|path| RepoPath::new(&path))
2244 .collect::<Result<Vec<_>>>()?;
2245
2246 repository_handle
2247 .update(&mut cx, |repository_handle, cx| {
2248 repository_handle.stash_entries(entries, cx)
2249 })
2250 .await?;
2251
2252 Ok(proto::Ack {})
2253 }
2254
2255 async fn handle_stash_pop(
2256 this: Entity<Self>,
2257 envelope: TypedEnvelope<proto::StashPop>,
2258 mut cx: AsyncApp,
2259 ) -> Result<proto::Ack> {
2260 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2261 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2262 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2263
2264 repository_handle
2265 .update(&mut cx, |repository_handle, cx| {
2266 repository_handle.stash_pop(stash_index, cx)
2267 })
2268 .await?;
2269
2270 Ok(proto::Ack {})
2271 }
2272
2273 async fn handle_stash_apply(
2274 this: Entity<Self>,
2275 envelope: TypedEnvelope<proto::StashApply>,
2276 mut cx: AsyncApp,
2277 ) -> Result<proto::Ack> {
2278 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2279 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2280 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2281
2282 repository_handle
2283 .update(&mut cx, |repository_handle, cx| {
2284 repository_handle.stash_apply(stash_index, cx)
2285 })
2286 .await?;
2287
2288 Ok(proto::Ack {})
2289 }
2290
2291 async fn handle_stash_drop(
2292 this: Entity<Self>,
2293 envelope: TypedEnvelope<proto::StashDrop>,
2294 mut cx: AsyncApp,
2295 ) -> Result<proto::Ack> {
2296 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2297 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2298 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2299
2300 repository_handle
2301 .update(&mut cx, |repository_handle, cx| {
2302 repository_handle.stash_drop(stash_index, cx)
2303 })
2304 .await??;
2305
2306 Ok(proto::Ack {})
2307 }
2308
2309 async fn handle_set_index_text(
2310 this: Entity<Self>,
2311 envelope: TypedEnvelope<proto::SetIndexText>,
2312 mut cx: AsyncApp,
2313 ) -> Result<proto::Ack> {
2314 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2315 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2316 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2317
2318 repository_handle
2319 .update(&mut cx, |repository_handle, cx| {
2320 repository_handle.spawn_set_index_text_job(
2321 repo_path,
2322 envelope.payload.text,
2323 None,
2324 cx,
2325 )
2326 })
2327 .await??;
2328 Ok(proto::Ack {})
2329 }
2330
2331 async fn handle_run_hook(
2332 this: Entity<Self>,
2333 envelope: TypedEnvelope<proto::RunGitHook>,
2334 mut cx: AsyncApp,
2335 ) -> Result<proto::Ack> {
2336 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2337 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2338 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2339 repository_handle
2340 .update(&mut cx, |repository_handle, cx| {
2341 repository_handle.run_hook(hook, cx)
2342 })
2343 .await??;
2344 Ok(proto::Ack {})
2345 }
2346
2347 async fn handle_commit(
2348 this: Entity<Self>,
2349 envelope: TypedEnvelope<proto::Commit>,
2350 mut cx: AsyncApp,
2351 ) -> Result<proto::Ack> {
2352 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2353 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2354 let askpass_id = envelope.payload.askpass_id;
2355
2356 let askpass = make_remote_delegate(
2357 this,
2358 envelope.payload.project_id,
2359 repository_id,
2360 askpass_id,
2361 &mut cx,
2362 );
2363
2364 let message = SharedString::from(envelope.payload.message);
2365 let name = envelope.payload.name.map(SharedString::from);
2366 let email = envelope.payload.email.map(SharedString::from);
2367 let options = envelope.payload.options.unwrap_or_default();
2368
2369 repository_handle
2370 .update(&mut cx, |repository_handle, cx| {
2371 repository_handle.commit(
2372 message,
2373 name.zip(email),
2374 CommitOptions {
2375 amend: options.amend,
2376 signoff: options.signoff,
2377 allow_empty: options.allow_empty,
2378 },
2379 askpass,
2380 cx,
2381 )
2382 })
2383 .await??;
2384 Ok(proto::Ack {})
2385 }
2386
2387 async fn handle_get_remotes(
2388 this: Entity<Self>,
2389 envelope: TypedEnvelope<proto::GetRemotes>,
2390 mut cx: AsyncApp,
2391 ) -> Result<proto::GetRemotesResponse> {
2392 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2393 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2394
2395 let branch_name = envelope.payload.branch_name;
2396 let is_push = envelope.payload.is_push;
2397
2398 let remotes = repository_handle
2399 .update(&mut cx, |repository_handle, _| {
2400 repository_handle.get_remotes(branch_name, is_push)
2401 })
2402 .await??;
2403
2404 Ok(proto::GetRemotesResponse {
2405 remotes: remotes
2406 .into_iter()
2407 .map(|remotes| proto::get_remotes_response::Remote {
2408 name: remotes.name.to_string(),
2409 })
2410 .collect::<Vec<_>>(),
2411 })
2412 }
2413
2414 async fn handle_get_worktrees(
2415 this: Entity<Self>,
2416 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2417 mut cx: AsyncApp,
2418 ) -> Result<proto::GitWorktreesResponse> {
2419 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2420 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2421
2422 let worktrees = repository_handle
2423 .update(&mut cx, |repository_handle, _| {
2424 repository_handle.worktrees()
2425 })
2426 .await??;
2427
2428 Ok(proto::GitWorktreesResponse {
2429 worktrees: worktrees
2430 .into_iter()
2431 .map(|worktree| worktree_to_proto(&worktree))
2432 .collect::<Vec<_>>(),
2433 })
2434 }
2435
2436 async fn handle_create_worktree(
2437 this: Entity<Self>,
2438 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2439 mut cx: AsyncApp,
2440 ) -> Result<proto::Ack> {
2441 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2442 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2443 let directory = PathBuf::from(envelope.payload.directory);
2444 let name = envelope.payload.name;
2445 let commit = envelope.payload.commit;
2446 let use_existing_branch = envelope.payload.use_existing_branch;
2447 let target = if name.is_empty() {
2448 CreateWorktreeTarget::Detached { base_sha: commit }
2449 } else if use_existing_branch {
2450 CreateWorktreeTarget::ExistingBranch { branch_name: name }
2451 } else {
2452 CreateWorktreeTarget::NewBranch {
2453 branch_name: name,
2454 base_sha: commit,
2455 }
2456 };
2457
2458 repository_handle
2459 .update(&mut cx, |repository_handle, _| {
2460 repository_handle.create_worktree(target, directory)
2461 })
2462 .await??;
2463
2464 Ok(proto::Ack {})
2465 }
2466
2467 async fn handle_remove_worktree(
2468 this: Entity<Self>,
2469 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2470 mut cx: AsyncApp,
2471 ) -> Result<proto::Ack> {
2472 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2473 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2474 let path = PathBuf::from(envelope.payload.path);
2475 let force = envelope.payload.force;
2476
2477 repository_handle
2478 .update(&mut cx, |repository_handle, _| {
2479 repository_handle.remove_worktree(path, force)
2480 })
2481 .await??;
2482
2483 Ok(proto::Ack {})
2484 }
2485
2486 async fn handle_rename_worktree(
2487 this: Entity<Self>,
2488 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2489 mut cx: AsyncApp,
2490 ) -> Result<proto::Ack> {
2491 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2492 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2493 let old_path = PathBuf::from(envelope.payload.old_path);
2494 let new_path = PathBuf::from(envelope.payload.new_path);
2495
2496 repository_handle
2497 .update(&mut cx, |repository_handle, _| {
2498 repository_handle.rename_worktree(old_path, new_path)
2499 })
2500 .await??;
2501
2502 Ok(proto::Ack {})
2503 }
2504
2505 async fn handle_get_head_sha(
2506 this: Entity<Self>,
2507 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2508 mut cx: AsyncApp,
2509 ) -> Result<proto::GitGetHeadShaResponse> {
2510 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2511 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2512
2513 let head_sha = repository_handle
2514 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2515 .await??;
2516
2517 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2518 }
2519
2520 async fn handle_get_branches(
2521 this: Entity<Self>,
2522 envelope: TypedEnvelope<proto::GitGetBranches>,
2523 mut cx: AsyncApp,
2524 ) -> Result<proto::GitBranchesResponse> {
2525 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2526 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2527
2528 let branches = repository_handle
2529 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2530 .await??;
2531
2532 Ok(proto::GitBranchesResponse {
2533 branches: branches
2534 .into_iter()
2535 .map(|branch| branch_to_proto(&branch))
2536 .collect::<Vec<_>>(),
2537 })
2538 }
2539 async fn handle_get_default_branch(
2540 this: Entity<Self>,
2541 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2542 mut cx: AsyncApp,
2543 ) -> Result<proto::GetDefaultBranchResponse> {
2544 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2545 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2546
2547 let branch = repository_handle
2548 .update(&mut cx, |repository_handle, _| {
2549 repository_handle.default_branch(false)
2550 })
2551 .await??
2552 .map(Into::into);
2553
2554 Ok(proto::GetDefaultBranchResponse { branch })
2555 }
2556 async fn handle_create_branch(
2557 this: Entity<Self>,
2558 envelope: TypedEnvelope<proto::GitCreateBranch>,
2559 mut cx: AsyncApp,
2560 ) -> Result<proto::Ack> {
2561 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2562 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2563 let branch_name = envelope.payload.branch_name;
2564
2565 repository_handle
2566 .update(&mut cx, |repository_handle, _| {
2567 repository_handle.create_branch(branch_name, None)
2568 })
2569 .await??;
2570
2571 Ok(proto::Ack {})
2572 }
2573
2574 async fn handle_change_branch(
2575 this: Entity<Self>,
2576 envelope: TypedEnvelope<proto::GitChangeBranch>,
2577 mut cx: AsyncApp,
2578 ) -> Result<proto::Ack> {
2579 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2580 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2581 let branch_name = envelope.payload.branch_name;
2582
2583 repository_handle
2584 .update(&mut cx, |repository_handle, _| {
2585 repository_handle.change_branch(branch_name)
2586 })
2587 .await??;
2588
2589 Ok(proto::Ack {})
2590 }
2591
2592 async fn handle_rename_branch(
2593 this: Entity<Self>,
2594 envelope: TypedEnvelope<proto::GitRenameBranch>,
2595 mut cx: AsyncApp,
2596 ) -> Result<proto::Ack> {
2597 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2598 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2599 let branch = envelope.payload.branch;
2600 let new_name = envelope.payload.new_name;
2601
2602 repository_handle
2603 .update(&mut cx, |repository_handle, _| {
2604 repository_handle.rename_branch(branch, new_name)
2605 })
2606 .await??;
2607
2608 Ok(proto::Ack {})
2609 }
2610
2611 async fn handle_create_remote(
2612 this: Entity<Self>,
2613 envelope: TypedEnvelope<proto::GitCreateRemote>,
2614 mut cx: AsyncApp,
2615 ) -> Result<proto::Ack> {
2616 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2617 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2618 let remote_name = envelope.payload.remote_name;
2619 let remote_url = envelope.payload.remote_url;
2620
2621 repository_handle
2622 .update(&mut cx, |repository_handle, _| {
2623 repository_handle.create_remote(remote_name, remote_url)
2624 })
2625 .await??;
2626
2627 Ok(proto::Ack {})
2628 }
2629
2630 async fn handle_delete_branch(
2631 this: Entity<Self>,
2632 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2633 mut cx: AsyncApp,
2634 ) -> Result<proto::Ack> {
2635 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2636 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2637 let is_remote = envelope.payload.is_remote;
2638 let branch_name = envelope.payload.branch_name;
2639
2640 repository_handle
2641 .update(&mut cx, |repository_handle, _| {
2642 repository_handle.delete_branch(is_remote, branch_name)
2643 })
2644 .await??;
2645
2646 Ok(proto::Ack {})
2647 }
2648
2649 async fn handle_remove_remote(
2650 this: Entity<Self>,
2651 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2652 mut cx: AsyncApp,
2653 ) -> Result<proto::Ack> {
2654 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2655 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2656 let remote_name = envelope.payload.remote_name;
2657
2658 repository_handle
2659 .update(&mut cx, |repository_handle, _| {
2660 repository_handle.remove_remote(remote_name)
2661 })
2662 .await??;
2663
2664 Ok(proto::Ack {})
2665 }
2666
2667 async fn handle_show(
2668 this: Entity<Self>,
2669 envelope: TypedEnvelope<proto::GitShow>,
2670 mut cx: AsyncApp,
2671 ) -> Result<proto::GitCommitDetails> {
2672 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2673 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2674
2675 let commit = repository_handle
2676 .update(&mut cx, |repository_handle, _| {
2677 repository_handle.show(envelope.payload.commit)
2678 })
2679 .await??;
2680 Ok(proto::GitCommitDetails {
2681 sha: commit.sha.into(),
2682 message: commit.message.into(),
2683 commit_timestamp: commit.commit_timestamp,
2684 author_email: commit.author_email.into(),
2685 author_name: commit.author_name.into(),
2686 })
2687 }
2688
2689 async fn handle_create_checkpoint(
2690 this: Entity<Self>,
2691 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2692 mut cx: AsyncApp,
2693 ) -> Result<proto::GitCreateCheckpointResponse> {
2694 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2695 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2696
2697 let checkpoint = repository_handle
2698 .update(&mut cx, |repository, _| repository.checkpoint())
2699 .await??;
2700
2701 Ok(proto::GitCreateCheckpointResponse {
2702 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2703 })
2704 }
2705
2706 async fn handle_restore_checkpoint(
2707 this: Entity<Self>,
2708 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2709 mut cx: AsyncApp,
2710 ) -> Result<proto::Ack> {
2711 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2712 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2713
2714 let checkpoint = GitRepositoryCheckpoint {
2715 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2716 };
2717
2718 repository_handle
2719 .update(&mut cx, |repository, _| {
2720 repository.restore_checkpoint(checkpoint)
2721 })
2722 .await??;
2723
2724 Ok(proto::Ack {})
2725 }
2726
2727 async fn handle_compare_checkpoints(
2728 this: Entity<Self>,
2729 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2730 mut cx: AsyncApp,
2731 ) -> Result<proto::GitCompareCheckpointsResponse> {
2732 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2733 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2734
2735 let left = GitRepositoryCheckpoint {
2736 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2737 };
2738 let right = GitRepositoryCheckpoint {
2739 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2740 };
2741
2742 let equal = repository_handle
2743 .update(&mut cx, |repository, _| {
2744 repository.compare_checkpoints(left, right)
2745 })
2746 .await??;
2747
2748 Ok(proto::GitCompareCheckpointsResponse { equal })
2749 }
2750
2751 async fn handle_diff_checkpoints(
2752 this: Entity<Self>,
2753 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2754 mut cx: AsyncApp,
2755 ) -> Result<proto::GitDiffCheckpointsResponse> {
2756 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2757 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2758
2759 let base = GitRepositoryCheckpoint {
2760 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2761 };
2762 let target = GitRepositoryCheckpoint {
2763 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2764 };
2765
2766 let diff = repository_handle
2767 .update(&mut cx, |repository, _| {
2768 repository.diff_checkpoints(base, target)
2769 })
2770 .await??;
2771
2772 Ok(proto::GitDiffCheckpointsResponse { diff })
2773 }
2774
2775 async fn handle_load_commit_diff(
2776 this: Entity<Self>,
2777 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2778 mut cx: AsyncApp,
2779 ) -> Result<proto::LoadCommitDiffResponse> {
2780 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2781 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2782
2783 let commit_diff = repository_handle
2784 .update(&mut cx, |repository_handle, _| {
2785 repository_handle.load_commit_diff(envelope.payload.commit)
2786 })
2787 .await??;
2788 Ok(proto::LoadCommitDiffResponse {
2789 files: commit_diff
2790 .files
2791 .into_iter()
2792 .map(|file| proto::CommitFile {
2793 path: file.path.to_proto(),
2794 old_text: file.old_text,
2795 new_text: file.new_text,
2796 is_binary: file.is_binary,
2797 })
2798 .collect(),
2799 })
2800 }
2801
2802 async fn handle_file_history(
2803 this: Entity<Self>,
2804 envelope: TypedEnvelope<proto::GitFileHistory>,
2805 mut cx: AsyncApp,
2806 ) -> Result<proto::GitFileHistoryResponse> {
2807 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2808 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2809 let path = RepoPath::from_proto(&envelope.payload.path)?;
2810 let skip = envelope.payload.skip as usize;
2811 let limit = envelope.payload.limit.map(|l| l as usize);
2812
2813 let file_history = repository_handle
2814 .update(&mut cx, |repository_handle, _| {
2815 repository_handle.file_history_paginated(path, skip, limit)
2816 })
2817 .await??;
2818
2819 Ok(proto::GitFileHistoryResponse {
2820 entries: file_history
2821 .entries
2822 .into_iter()
2823 .map(|entry| proto::FileHistoryEntry {
2824 sha: entry.sha.to_string(),
2825 subject: entry.subject.to_string(),
2826 message: entry.message.to_string(),
2827 commit_timestamp: entry.commit_timestamp,
2828 author_name: entry.author_name.to_string(),
2829 author_email: entry.author_email.to_string(),
2830 })
2831 .collect(),
2832 path: file_history.path.to_proto(),
2833 })
2834 }
2835
2836 async fn handle_reset(
2837 this: Entity<Self>,
2838 envelope: TypedEnvelope<proto::GitReset>,
2839 mut cx: AsyncApp,
2840 ) -> Result<proto::Ack> {
2841 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2842 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2843
2844 let mode = match envelope.payload.mode() {
2845 git_reset::ResetMode::Soft => ResetMode::Soft,
2846 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2847 };
2848
2849 repository_handle
2850 .update(&mut cx, |repository_handle, cx| {
2851 repository_handle.reset(envelope.payload.commit, mode, cx)
2852 })
2853 .await??;
2854 Ok(proto::Ack {})
2855 }
2856
2857 async fn handle_checkout_files(
2858 this: Entity<Self>,
2859 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2860 mut cx: AsyncApp,
2861 ) -> Result<proto::Ack> {
2862 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2863 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2864 let paths = envelope
2865 .payload
2866 .paths
2867 .iter()
2868 .map(|s| RepoPath::from_proto(s))
2869 .collect::<Result<Vec<_>>>()?;
2870
2871 repository_handle
2872 .update(&mut cx, |repository_handle, cx| {
2873 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2874 })
2875 .await?;
2876 Ok(proto::Ack {})
2877 }
2878
2879 async fn handle_open_commit_message_buffer(
2880 this: Entity<Self>,
2881 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2882 mut cx: AsyncApp,
2883 ) -> Result<proto::OpenBufferResponse> {
2884 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2885 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2886 let buffer = repository
2887 .update(&mut cx, |repository, cx| {
2888 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2889 })
2890 .await?;
2891
2892 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
2893 this.update(&mut cx, |this, cx| {
2894 this.buffer_store.update(cx, |buffer_store, cx| {
2895 buffer_store
2896 .create_buffer_for_peer(
2897 &buffer,
2898 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2899 cx,
2900 )
2901 .detach_and_log_err(cx);
2902 })
2903 });
2904
2905 Ok(proto::OpenBufferResponse {
2906 buffer_id: buffer_id.to_proto(),
2907 })
2908 }
2909
2910 async fn handle_askpass(
2911 this: Entity<Self>,
2912 envelope: TypedEnvelope<proto::AskPassRequest>,
2913 mut cx: AsyncApp,
2914 ) -> Result<proto::AskPassResponse> {
2915 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2916 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2917
2918 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
2919 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2920 debug_panic!("no askpass found");
2921 anyhow::bail!("no askpass found");
2922 };
2923
2924 let response = askpass
2925 .ask_password(envelope.payload.prompt)
2926 .await
2927 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2928
2929 delegates
2930 .lock()
2931 .insert(envelope.payload.askpass_id, askpass);
2932
2933 // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
2934 Ok(proto::AskPassResponse {
2935 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2936 })
2937 }
2938
2939 async fn handle_check_for_pushed_commits(
2940 this: Entity<Self>,
2941 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2942 mut cx: AsyncApp,
2943 ) -> Result<proto::CheckForPushedCommitsResponse> {
2944 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2945 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2946
2947 let branches = repository_handle
2948 .update(&mut cx, |repository_handle, _| {
2949 repository_handle.check_for_pushed_commits()
2950 })
2951 .await??;
2952 Ok(proto::CheckForPushedCommitsResponse {
2953 pushed_to: branches
2954 .into_iter()
2955 .map(|commit| commit.to_string())
2956 .collect(),
2957 })
2958 }
2959
2960 async fn handle_git_diff(
2961 this: Entity<Self>,
2962 envelope: TypedEnvelope<proto::GitDiff>,
2963 mut cx: AsyncApp,
2964 ) -> Result<proto::GitDiffResponse> {
2965 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2966 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2967 let diff_type = match envelope.payload.diff_type() {
2968 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2969 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2970 proto::git_diff::DiffType::MergeBase => {
2971 let base_ref = envelope
2972 .payload
2973 .merge_base_ref
2974 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2975 DiffType::MergeBase {
2976 base_ref: base_ref.into(),
2977 }
2978 }
2979 };
2980
2981 let mut diff = repository_handle
2982 .update(&mut cx, |repository_handle, cx| {
2983 repository_handle.diff(diff_type, cx)
2984 })
2985 .await??;
2986 const ONE_MB: usize = 1_000_000;
2987 if diff.len() > ONE_MB {
2988 diff = diff.chars().take(ONE_MB).collect()
2989 }
2990
2991 Ok(proto::GitDiffResponse { diff })
2992 }
2993
2994 async fn handle_tree_diff(
2995 this: Entity<Self>,
2996 request: TypedEnvelope<proto::GetTreeDiff>,
2997 mut cx: AsyncApp,
2998 ) -> Result<proto::GetTreeDiffResponse> {
2999 let repository_id = RepositoryId(request.payload.repository_id);
3000 let diff_type = if request.payload.is_merge {
3001 DiffTreeType::MergeBase {
3002 base: request.payload.base.into(),
3003 head: request.payload.head.into(),
3004 }
3005 } else {
3006 DiffTreeType::Since {
3007 base: request.payload.base.into(),
3008 head: request.payload.head.into(),
3009 }
3010 };
3011
3012 let diff = this
3013 .update(&mut cx, |this, cx| {
3014 let repository = this.repositories().get(&repository_id)?;
3015 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
3016 })
3017 .context("missing repository")?
3018 .await??;
3019
3020 Ok(proto::GetTreeDiffResponse {
3021 entries: diff
3022 .entries
3023 .into_iter()
3024 .map(|(path, status)| proto::TreeDiffStatus {
3025 path: path.as_ref().to_proto(),
3026 status: match status {
3027 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
3028 TreeDiffStatus::Modified { .. } => {
3029 proto::tree_diff_status::Status::Modified.into()
3030 }
3031 TreeDiffStatus::Deleted { .. } => {
3032 proto::tree_diff_status::Status::Deleted.into()
3033 }
3034 },
3035 oid: match status {
3036 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
3037 Some(old.to_string())
3038 }
3039 TreeDiffStatus::Added => None,
3040 },
3041 })
3042 .collect(),
3043 })
3044 }
3045
3046 async fn handle_get_blob_content(
3047 this: Entity<Self>,
3048 request: TypedEnvelope<proto::GetBlobContent>,
3049 mut cx: AsyncApp,
3050 ) -> Result<proto::GetBlobContentResponse> {
3051 let oid = git::Oid::from_str(&request.payload.oid)?;
3052 let repository_id = RepositoryId(request.payload.repository_id);
3053 let content = this
3054 .update(&mut cx, |this, cx| {
3055 let repository = this.repositories().get(&repository_id)?;
3056 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
3057 })
3058 .context("missing repository")?
3059 .await?;
3060 Ok(proto::GetBlobContentResponse { content })
3061 }
3062
3063 async fn handle_open_unstaged_diff(
3064 this: Entity<Self>,
3065 request: TypedEnvelope<proto::OpenUnstagedDiff>,
3066 mut cx: AsyncApp,
3067 ) -> Result<proto::OpenUnstagedDiffResponse> {
3068 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3069 let diff = this
3070 .update(&mut cx, |this, cx| {
3071 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
3072 Some(this.open_unstaged_diff(buffer, cx))
3073 })
3074 .context("missing buffer")?
3075 .await?;
3076 this.update(&mut cx, |this, _| {
3077 let shared_diffs = this
3078 .shared_diffs
3079 .entry(request.original_sender_id.unwrap_or(request.sender_id))
3080 .or_default();
3081 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
3082 });
3083 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
3084 Ok(proto::OpenUnstagedDiffResponse { staged_text })
3085 }
3086
    /// Opens the uncommitted (HEAD vs buffer) diff for a buffer on behalf of a
    /// remote collaborator.
    ///
    /// The diff is retained in `shared_diffs` for the requesting peer, then
    /// the response is assembled with a `Mode` that tells the peer how to
    /// interpret the base texts:
    /// - `IndexMatchesHead`: the index snapshot is the same buffer as the
    ///   committed snapshot, so only `committed_text` is sent.
    /// - `IndexAndHead`: index and HEAD differ (or one is absent), so both
    ///   texts are sent, each `None` when the corresponding base is missing.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Keep the diff alive on behalf of the requesting peer.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff (if present) tracks the index base text.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Same underlying snapshot: avoid sending the text twice.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // No index base text (e.g. not staged).
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base text (e.g. file not in HEAD).
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3147
3148 async fn handle_update_diff_bases(
3149 this: Entity<Self>,
3150 request: TypedEnvelope<proto::UpdateDiffBases>,
3151 mut cx: AsyncApp,
3152 ) -> Result<()> {
3153 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3154 this.update(&mut cx, |this, cx| {
3155 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
3156 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
3157 {
3158 let buffer = buffer.read(cx).text_snapshot();
3159 diff_state.update(cx, |diff_state, cx| {
3160 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
3161 })
3162 }
3163 });
3164 Ok(())
3165 }
3166
3167 async fn handle_blame_buffer(
3168 this: Entity<Self>,
3169 envelope: TypedEnvelope<proto::BlameBuffer>,
3170 mut cx: AsyncApp,
3171 ) -> Result<proto::BlameBufferResponse> {
3172 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3173 let version = deserialize_version(&envelope.payload.version);
3174 let buffer = this.read_with(&cx, |this, cx| {
3175 this.buffer_store.read(cx).get_existing(buffer_id)
3176 })?;
3177 buffer
3178 .update(&mut cx, |buffer, _| {
3179 buffer.wait_for_version(version.clone())
3180 })
3181 .await?;
3182 let blame = this
3183 .update(&mut cx, |this, cx| {
3184 this.blame_buffer(&buffer, Some(version), cx)
3185 })
3186 .await?;
3187 Ok(serialize_blame_buffer_response(blame))
3188 }
3189
3190 async fn handle_get_permalink_to_line(
3191 this: Entity<Self>,
3192 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3193 mut cx: AsyncApp,
3194 ) -> Result<proto::GetPermalinkToLineResponse> {
3195 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3196 // let version = deserialize_version(&envelope.payload.version);
3197 let selection = {
3198 let proto_selection = envelope
3199 .payload
3200 .selection
3201 .context("no selection to get permalink for defined")?;
3202 proto_selection.start as u32..proto_selection.end as u32
3203 };
3204 let buffer = this.read_with(&cx, |this, cx| {
3205 this.buffer_store.read(cx).get_existing(buffer_id)
3206 })?;
3207 let permalink = this
3208 .update(&mut cx, |this, cx| {
3209 this.get_permalink_to_line(&buffer, selection, cx)
3210 })
3211 .await?;
3212 Ok(proto::GetPermalinkToLineResponse {
3213 permalink: permalink.to_string(),
3214 })
3215 }
3216
3217 fn repository_for_request(
3218 this: &Entity<Self>,
3219 id: RepositoryId,
3220 cx: &mut AsyncApp,
3221 ) -> Result<Entity<Repository>> {
3222 this.read_with(cx, |this, _| {
3223 this.repositories
3224 .get(&id)
3225 .context("missing repository handle")
3226 .cloned()
3227 })
3228 }
3229
3230 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3231 self.repositories
3232 .iter()
3233 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3234 .collect()
3235 }
3236
    /// Maps a batch of updated worktree entries to the repositories that
    /// contain them.
    ///
    /// Returns a background task resolving to, for each affected repository,
    /// the repo-relative paths of the updated entries. Each path is assigned
    /// to at most one repository: the innermost one whose work directory
    /// contains it.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Snapshot each repo's work directory path up front so the background
        // tasks don't need to touch entities.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutizing prepends the same worktree root to every entry, so the
        // sorted order established above is preserved.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sort repos by work directory; iterating in reverse below visits
            // deeper (more specific) work directories first.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo.
                    // `repo_path` here is the repo's work directory abs path;
                    // partition_point locates the first entry sorted at or
                    // after it.
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Tasks were queued deepest-work-directory-first, so the first
            // claim on a path wins: each path lands in its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3316}
3317
3318impl BufferGitState {
    /// Creates an empty per-buffer git state: no diffs, no base texts, no
    /// conflict tracking yet.
    ///
    /// The `_git_store` handle is currently unused; it is kept for signature
    /// stability.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Watch channel that tracks whether a diff recalculation is in
            // flight; starts idle.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }
3341
3342 #[ztracing::instrument(skip_all)]
3343 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3344 self.language = buffer.read(cx).language().cloned();
3345 self.language_changed = true;
3346 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3347 }
3348
    /// Re-parses conflict markers in `buffer` and updates the associated
    /// [`ConflictSet`], if any.
    ///
    /// Returns a receiver that resolves once the conflict set has been
    /// updated. When there is no live conflict set, or it currently reports
    /// no conflict, no reparse is scheduled and the sender is dropped, so the
    /// receiver yields `Err(Canceled)` instead.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when there was a conflict to begin with; capture the
        // old snapshot so the changed region can be computed.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff against the old snapshot off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Resolve every waiter that accumulated while parsing.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3400
3401 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3402 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3403 }
3404
3405 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3406 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3407 }
3408
3409 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3410 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3411 }
3412
3413 fn handle_base_texts_updated(
3414 &mut self,
3415 buffer: text::BufferSnapshot,
3416 message: proto::UpdateDiffBases,
3417 cx: &mut Context<Self>,
3418 ) {
3419 use proto::update_diff_bases::Mode;
3420
3421 let Some(mode) = Mode::from_i32(message.mode) else {
3422 return;
3423 };
3424
3425 let diff_bases_change = match mode {
3426 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3427 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3428 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3429 Mode::IndexAndHead => DiffBasesChange::SetEach {
3430 index: message.staged_text,
3431 head: message.committed_text,
3432 },
3433 };
3434
3435 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3436 }
3437
3438 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3439 if *self.recalculating_tx.borrow() {
3440 let mut rx = self.recalculating_tx.subscribe();
3441 Some(async move {
3442 loop {
3443 let is_recalculating = rx.recv().await;
3444 if is_recalculating != Some(true) {
3445 break;
3446 }
3447 }
3448 })
3449 } else {
3450 None
3451 }
3452 }
3453
3454 fn diff_bases_changed(
3455 &mut self,
3456 buffer: text::BufferSnapshot,
3457 diff_bases_change: Option<DiffBasesChange>,
3458 cx: &mut Context<Self>,
3459 ) {
3460 match diff_bases_change {
3461 Some(DiffBasesChange::SetIndex(index)) => {
3462 self.index_text = index.map(|mut index| {
3463 text::LineEnding::normalize(&mut index);
3464 Arc::from(index.as_str())
3465 });
3466 self.index_changed = true;
3467 }
3468 Some(DiffBasesChange::SetHead(head)) => {
3469 self.head_text = head.map(|mut head| {
3470 text::LineEnding::normalize(&mut head);
3471 Arc::from(head.as_str())
3472 });
3473 self.head_changed = true;
3474 }
3475 Some(DiffBasesChange::SetBoth(text)) => {
3476 let text = text.map(|mut text| {
3477 text::LineEnding::normalize(&mut text);
3478 Arc::from(text.as_str())
3479 });
3480 self.head_text = text.clone();
3481 self.index_text = text;
3482 self.head_changed = true;
3483 self.index_changed = true;
3484 }
3485 Some(DiffBasesChange::SetEach { index, head }) => {
3486 self.index_text = index.map(|mut index| {
3487 text::LineEnding::normalize(&mut index);
3488 Arc::from(index.as_str())
3489 });
3490 self.index_changed = true;
3491 self.head_text = head.map(|mut head| {
3492 text::LineEnding::normalize(&mut head);
3493 Arc::from(head.as_str())
3494 });
3495 self.head_changed = true;
3496 }
3497 None => {}
3498 }
3499
3500 self.recalculate_diffs(buffer, cx)
3501 }
3502
3503 #[ztracing::instrument(skip_all)]
3504 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3505 *self.recalculating_tx.borrow_mut() = true;
3506
3507 let language = self.language.clone();
3508 let language_registry = self.language_registry.clone();
3509 let unstaged_diff = self.unstaged_diff();
3510 let uncommitted_diff = self.uncommitted_diff();
3511 let head = self.head_text.clone();
3512 let index = self.index_text.clone();
3513 let index_changed = self.index_changed;
3514 let head_changed = self.head_changed;
3515 let language_changed = self.language_changed;
3516 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3517 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3518 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3519 (None, None) => true,
3520 _ => false,
3521 };
3522
3523 let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
3524 .oid_diffs
3525 .iter()
3526 .filter_map(|(oid, weak)| {
3527 let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
3528 weak.upgrade().map(|diff| (*oid, diff, base_text))
3529 })
3530 .collect();
3531
3532 self.oid_diffs.retain(|oid, weak| {
3533 let alive = weak.upgrade().is_some();
3534 if !alive {
3535 if let Some(oid) = oid {
3536 self.oid_texts.remove(oid);
3537 }
3538 }
3539 alive
3540 });
3541 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3542 log::debug!(
3543 "start recalculating diffs for buffer {}",
3544 buffer.remote_id()
3545 );
3546
3547 let mut new_unstaged_diff = None;
3548 if let Some(unstaged_diff) = &unstaged_diff {
3549 new_unstaged_diff = Some(
3550 cx.update(|cx| {
3551 unstaged_diff.read(cx).update_diff(
3552 buffer.clone(),
3553 index,
3554 index_changed.then_some(false),
3555 language.clone(),
3556 cx,
3557 )
3558 })
3559 .await,
3560 );
3561 }
3562
3563 // Dropping BufferDiff can be expensive, so yield back to the event loop
3564 // for a bit
3565 yield_now().await;
3566
3567 let mut new_uncommitted_diff = None;
3568 if let Some(uncommitted_diff) = &uncommitted_diff {
3569 new_uncommitted_diff = if index_matches_head {
3570 new_unstaged_diff.clone()
3571 } else {
3572 Some(
3573 cx.update(|cx| {
3574 uncommitted_diff.read(cx).update_diff(
3575 buffer.clone(),
3576 head,
3577 head_changed.then_some(true),
3578 language.clone(),
3579 cx,
3580 )
3581 })
3582 .await,
3583 )
3584 }
3585 }
3586
3587 // Dropping BufferDiff can be expensive, so yield back to the event loop
3588 // for a bit
3589 yield_now().await;
3590
3591 let cancel = this.update(cx, |this, _| {
3592 // This checks whether all pending stage/unstage operations
3593 // have quiesced (i.e. both the corresponding write and the
3594 // read of that write have completed). If not, then we cancel
3595 // this recalculation attempt to avoid invalidating pending
3596 // state too quickly; another recalculation will come along
3597 // later and clear the pending state once the state of the index has settled.
3598 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3599 *this.recalculating_tx.borrow_mut() = false;
3600 true
3601 } else {
3602 false
3603 }
3604 })?;
3605 if cancel {
3606 log::debug!(
3607 concat!(
3608 "aborting recalculating diffs for buffer {}",
3609 "due to subsequent hunk operations",
3610 ),
3611 buffer.remote_id()
3612 );
3613 return Ok(());
3614 }
3615
3616 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3617 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3618 {
3619 let task = unstaged_diff.update(cx, |diff, cx| {
3620 // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
3621 // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
3622 // view multibuffers.
3623 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3624 });
3625 Some(task.await)
3626 } else {
3627 None
3628 };
3629
3630 yield_now().await;
3631
3632 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3633 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3634 {
3635 uncommitted_diff
3636 .update(cx, |diff, cx| {
3637 if language_changed {
3638 diff.language_changed(language.clone(), language_registry, cx);
3639 }
3640 diff.set_snapshot_with_secondary(
3641 new_uncommitted_diff,
3642 &buffer,
3643 unstaged_changed_range.flatten(),
3644 true,
3645 cx,
3646 )
3647 })
3648 .await;
3649 }
3650
3651 yield_now().await;
3652
3653 for (oid, oid_diff, base_text) in oid_diffs {
3654 let new_oid_diff = cx
3655 .update(|cx| {
3656 oid_diff.read(cx).update_diff(
3657 buffer.clone(),
3658 base_text,
3659 None,
3660 language.clone(),
3661 cx,
3662 )
3663 })
3664 .await;
3665
3666 oid_diff
3667 .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
3668 .await;
3669
3670 log::debug!(
3671 "finished recalculating oid diff for buffer {} oid {:?}",
3672 buffer.remote_id(),
3673 oid
3674 );
3675
3676 yield_now().await;
3677 }
3678
3679 log::debug!(
3680 "finished recalculating diffs for buffer {}",
3681 buffer.remote_id()
3682 );
3683
3684 if let Some(this) = this.upgrade() {
3685 this.update(cx, |this, _| {
3686 this.index_changed = false;
3687 this.head_changed = false;
3688 this.language_changed = false;
3689 *this.recalculating_tx.borrow_mut() = false;
3690 });
3691 }
3692
3693 Ok(())
3694 }));
3695 }
3696}
3697
/// Builds an askpass delegate that forwards git credential prompts to the
/// downstream client over RPC.
///
/// Each prompt becomes an `AskPassRequest` tagged with `askpass_id`; the
/// (encrypted) response fulfills the prompt via `tx`. When no downstream
/// client is connected, the prompt is dropped unanswered.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Best-effort scrub of the plaintext response from memory.
                // NOTE(review): if `try_from` above fails, `?` returns before
                // this zeroize runs — confirm whether that path matters.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3727
3728impl RepositoryId {
3729 pub fn to_proto(self) -> u64 {
3730 self.0
3731 }
3732
3733 pub fn from_proto(id: u64) -> Self {
3734 RepositoryId(id)
3735 }
3736}
3737
3738impl RepositorySnapshot {
3739 fn empty(
3740 id: RepositoryId,
3741 work_directory_abs_path: Arc<Path>,
3742 original_repo_abs_path: Option<Arc<Path>>,
3743 path_style: PathStyle,
3744 ) -> Self {
3745 Self {
3746 id,
3747 statuses_by_path: Default::default(),
3748 original_repo_abs_path: original_repo_abs_path
3749 .unwrap_or_else(|| work_directory_abs_path.clone()),
3750 work_directory_abs_path,
3751 branch: None,
3752 branch_list: Arc::from([]),
3753 head_commit: None,
3754 scan_id: 0,
3755 merge: Default::default(),
3756 remote_origin_url: None,
3757 remote_upstream_url: None,
3758 stash_entries: Default::default(),
3759 linked_worktrees: Arc::from([]),
3760 path_style,
3761 }
3762 }
3763
    /// Serializes the entire snapshot into a `proto::UpdateRepository` for the
    /// first sync to a downstream client.
    ///
    /// Unlike [`Self::build_update`], every status entry is sent and
    /// `entry_ids` carries this repository's id.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            // Nothing can have been removed relative to "no prior state".
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3805
    /// Serializes the difference between `old` and `self` into a
    /// `proto::UpdateRepository`.
    ///
    /// Status entries are compared with a single merge-walk over the two
    /// path-sorted status lists: entries present only in `self` or changed
    /// since `old` go into `updated_statuses`; entries present only in `old`
    /// go into `removed_statuses`.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Path only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Path in both snapshots: send only if changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Path only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3886
    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    ///
    /// Detected by the work directory coinciding with the original repo path.
    pub fn is_main_worktree(&self) -> bool {
        self.work_directory_abs_path == self.original_repo_abs_path
    }
3897
    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules; this is
    /// exactly the negation of [`Self::is_main_worktree`].
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }
3905
3906 pub fn linked_worktrees(&self) -> &[GitWorktree] {
3907 &self.linked_worktrees
3908 }
3909
3910 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3911 self.statuses_by_path.iter().cloned()
3912 }
3913
3914 pub fn status_summary(&self) -> GitSummary {
3915 self.statuses_by_path.summary().item_summary
3916 }
3917
3918 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3919 self.statuses_by_path
3920 .get(&PathKey(path.as_ref().clone()), ())
3921 .cloned()
3922 }
3923
3924 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
3925 self.statuses_by_path
3926 .get(&PathKey(path.as_ref().clone()), ())
3927 .and_then(|entry| entry.diff_stat)
3928 }
3929
    /// Converts an absolute path into a path relative to this repository's
    /// work directory, or `None` if the path lies outside of it.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }
3933
3934 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3935 self.path_style
3936 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3937 .unwrap()
3938 .into()
3939 }
3940
3941 #[inline]
3942 fn abs_path_to_repo_path_inner(
3943 work_directory_abs_path: &Path,
3944 abs_path: &Path,
3945 path_style: PathStyle,
3946 ) -> Option<RepoPath> {
3947 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3948 Some(RepoPath::from_rel_path(&rel_path))
3949 }
3950
3951 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3952 self.merge
3953 .merge_heads_by_conflicted_path
3954 .contains_key(repo_path)
3955 }
3956
3957 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3958 let had_conflict_on_last_merge_head_change = self
3959 .merge
3960 .merge_heads_by_conflicted_path
3961 .contains_key(repo_path);
3962 let has_conflict_currently = self
3963 .status_for_path(repo_path)
3964 .is_some_and(|entry| entry.status.is_conflicted());
3965 had_conflict_on_last_merge_head_change || has_conflict_currently
3966 }
3967
3968 /// This is the name that will be displayed in the repository selector for this repository.
3969 pub fn display_name(&self) -> SharedString {
3970 self.work_directory_abs_path
3971 .file_name()
3972 .unwrap_or_default()
3973 .to_string_lossy()
3974 .to_string()
3975 .into()
3976 }
3977}
3978
3979pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3980 proto::StashEntry {
3981 oid: entry.oid.as_bytes().to_vec(),
3982 message: entry.message.clone(),
3983 branch: entry.branch.clone(),
3984 index: entry.index as u64,
3985 timestamp: entry.timestamp,
3986 }
3987}
3988
3989pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3990 Ok(StashEntry {
3991 oid: Oid::from_bytes(&entry.oid)?,
3992 message: entry.message.clone(),
3993 index: entry.index as usize,
3994 branch: entry.branch.clone(),
3995 timestamp: entry.timestamp,
3996 })
3997}
3998
3999impl MergeDetails {
4000 async fn update(
4001 &mut self,
4002 backend: &Arc<dyn GitRepository>,
4003 current_conflicted_paths: Vec<RepoPath>,
4004 ) -> Result<bool> {
4005 log::debug!("load merge details");
4006 self.message = backend.merge_message().await.map(SharedString::from);
4007 let heads = backend
4008 .revparse_batch(vec![
4009 "MERGE_HEAD".into(),
4010 "CHERRY_PICK_HEAD".into(),
4011 "REBASE_HEAD".into(),
4012 "REVERT_HEAD".into(),
4013 "APPLY_HEAD".into(),
4014 ])
4015 .await
4016 .log_err()
4017 .unwrap_or_default()
4018 .into_iter()
4019 .map(|opt| opt.map(SharedString::from))
4020 .collect::<Vec<_>>();
4021
4022 let mut conflicts_changed = false;
4023
4024 // Record the merge state for newly conflicted paths
4025 for path in ¤t_conflicted_paths {
4026 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
4027 conflicts_changed = true;
4028 self.merge_heads_by_conflicted_path
4029 .insert(path.clone(), heads.clone());
4030 }
4031 }
4032
4033 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
4034 self.merge_heads_by_conflicted_path
4035 .retain(|path, old_merge_heads| {
4036 let keep = current_conflicted_paths.contains(path)
4037 || (old_merge_heads == &heads
4038 && old_merge_heads.iter().any(|head| head.is_some()));
4039 if !keep {
4040 conflicts_changed = true;
4041 }
4042 keep
4043 });
4044
4045 Ok(conflicts_changed)
4046 }
4047}
4048
4049impl Repository {
4050 pub fn is_trusted(&self) -> bool {
4051 match self.repository_state.peek() {
4052 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
4053 _ => false,
4054 }
4055 }
4056
    /// Returns a clone of the current repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
4060
    /// Iterates over all per-path pending git operations, cloned out of the
    /// underlying tree.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
4064
    /// Aggregated summary over all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
4068
4069 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
4070 self.pending_ops
4071 .get(&PathKey(path.as_ref().clone()), ())
4072 .cloned()
4073 }
4074
    /// Constructs a repository handle backed by a local git working copy.
    ///
    /// The backend is opened asynchronously: `repository_state` remains
    /// pending until [`LocalRepositoryState::new`] resolves, while a local
    /// git worker is spawned immediately to queue jobs against it.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // Open the backend in a task; errors are stringified so the shared
        // future's output stays cloneable.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        // Wrap the local state in the RepositoryState enum for consumers.
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data when repository state changes.
        // The `scan_id > 1` guard skips invalidation during the initial scan
        // — presumably to avoid churning caches while state is first being
        // populated; TODO(review): confirm.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                // Stash changes only affect the "all sources" log view.
                if this.scan_id > 1 {
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4148
4149 fn remote(
4150 id: RepositoryId,
4151 work_directory_abs_path: Arc<Path>,
4152 original_repo_abs_path: Option<Arc<Path>>,
4153 path_style: PathStyle,
4154 project_id: ProjectId,
4155 client: AnyProtoClient,
4156 git_store: WeakEntity<GitStore>,
4157 cx: &mut Context<Self>,
4158 ) -> Self {
4159 let snapshot = RepositorySnapshot::empty(
4160 id,
4161 work_directory_abs_path,
4162 original_repo_abs_path,
4163 path_style,
4164 );
4165 let repository_state = RemoteRepositoryState { project_id, client };
4166 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
4167 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
4168 Self {
4169 this: cx.weak_entity(),
4170 snapshot,
4171 commit_message_buffer: None,
4172 git_store,
4173 pending_ops: Default::default(),
4174 paths_needing_status_update: Default::default(),
4175 job_sender,
4176 repository_state,
4177 askpass_delegates: Default::default(),
4178 latest_askpass_id: 0,
4179 active_jobs: Default::default(),
4180 job_id: 0,
4181 initial_graph_data: Default::default(),
4182 commit_data: Default::default(),
4183 graph_commit_data_handler: GraphCommitHandlerState::Closed,
4184 }
4185 }
4186
4187 pub fn git_store(&self) -> Option<Entity<GitStore>> {
4188 self.git_store.upgrade()
4189 }
4190
    /// Recomputes the diff base texts (index and HEAD contents) for every
    /// open buffer whose file belongs to this repository, applies any
    /// changes, and mirrors them to a downstream (collab) client if present.
    ///
    /// Enqueued under `GitJobKey::ReloadBufferDiffBases` so the worker can
    /// coalesce redundant reload requests.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                // Only local repositories have a backend to read base texts from.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // On the main thread: collect, for each tracked buffer in this
                // repository, its repo path and the current base texts — but
                // only for the diff kinds (unstaged/uncommitted) the buffer's
                // diff state actually holds an upgradable diff for.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // On a background thread: load fresh index/HEAD texts from the
                // git backend and compute the minimal `DiffBasesChange` (if
                // any) each buffer needs.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            // Skip loading texts the buffer doesn't track.
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // When both bases changed and now agree, collapse
                            // into `SetBoth` so downstream only stores one text.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Back on the main thread: apply each change to the buffer's
                // diff state, and forward it to the downstream client, if any.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                // Translate the change into the proto's
                                // (staged_text, committed_text, mode) encoding.
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4350
    /// Enqueues a job on the repository's git worker without a deduplication
    /// key; see [`Self::send_keyed_job`].
    ///
    /// `status`, when set, is displayed as an in-flight job message while
    /// the job runs. The returned receiver resolves with the job's output
    /// once it completes.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4363
    /// Enqueues a job on the repository's git worker.
    ///
    /// `key`, when present, identifies the job to the worker (see
    /// `GitJobKey`) — presumably so redundant jobs can be coalesced; confirm
    /// against the worker implementation. `status`, when present, is
    /// recorded in `active_jobs` together with a start time for the
    /// duration of the job, with `cx.notify()` fired on both insertion and
    /// removal so the UI can reflect progress. The returned receiver yields
    /// the job's result.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Monotonic id used to key this job's entry in `active_jobs`.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Record the job as active before running it.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Clear the active-job entry whether or not the job
                        // succeeded; errors travel through `result_tx`.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4413
4414 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4415 let Some(git_store) = self.git_store.upgrade() else {
4416 return;
4417 };
4418 let entity = cx.entity();
4419 git_store.update(cx, |git_store, cx| {
4420 let Some((&id, _)) = git_store
4421 .repositories
4422 .iter()
4423 .find(|(_, handle)| *handle == &entity)
4424 else {
4425 return;
4426 };
4427 git_store.active_repo_id = Some(id);
4428 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4429 });
4430 }
4431
4432 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4433 self.snapshot.status()
4434 }
4435
4436 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4437 self.snapshot.diff_stat_for_path(path)
4438 }
4439
4440 pub fn cached_stash(&self) -> GitStash {
4441 self.snapshot.stash_entries.clone()
4442 }
4443
4444 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4445 let git_store = self.git_store.upgrade()?;
4446 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4447 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4448 let abs_path = SanitizedPath::new(&abs_path);
4449 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4450 Some(ProjectPath {
4451 worktree_id: worktree.read(cx).id(),
4452 path: relative_path,
4453 })
4454 }
4455
4456 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4457 let git_store = self.git_store.upgrade()?;
4458 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4459 let abs_path = worktree_store.absolutize(path, cx)?;
4460 self.snapshot.abs_path_to_repo_path(&abs_path)
4461 }
4462
4463 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4464 other
4465 .read(cx)
4466 .snapshot
4467 .work_directory_abs_path
4468 .starts_with(&self.snapshot.work_directory_abs_path)
4469 }
4470
    /// Returns the shared commit-message buffer for this repository,
    /// creating it lazily on first use.
    ///
    /// For remote repositories this requests the buffer from the host and
    /// waits for it to be replicated locally, assigning it the "Git Commit"
    /// language when a registry is provided.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Reuse the buffer if we already created one.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Ask the host to open (or create) its commit buffer,
                    // then wait for that buffer to arrive locally.
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache the buffer so subsequent calls return it directly.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4523
    /// Creates a fresh in-memory buffer to hold a commit message for a local
    /// repository, caching it on the repository for reuse.
    ///
    /// The buffer gets the "Git Commit" language when a registry is
    /// provided; failing to resolve that language aborts the whole task.
    fn open_local_commit_buffer(
        language_registry: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        cx.spawn(async move |repository, cx| {
            let git_commit_language = match language_registry {
                Some(language_registry) => {
                    Some(language_registry.language_for_name("Git Commit").await?)
                }
                None => None,
            };
            let buffer = buffer_store
                .update(cx, |buffer_store, cx| {
                    buffer_store.create_buffer(git_commit_language, false, cx)
                })
                .await?;

            // Cache so `open_commit_buffer` returns this buffer next time.
            repository.update(cx, |repository, _| {
                repository.commit_message_buffer = Some(buffer.clone());
            })?;
            Ok(buffer)
        })
    }
4548
    /// Restores `paths` to their state at `commit` (`git checkout <commit>
    /// -- <paths>`).
    ///
    /// While the operation runs, the affected paths are tracked as pending
    /// `Reverted` operations so the UI can reflect the in-flight change.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                // Local: delegate to the backend directly.
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote: forward the request to the host.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4603
4604 pub fn reset(
4605 &mut self,
4606 commit: String,
4607 reset_mode: ResetMode,
4608 _cx: &mut App,
4609 ) -> oneshot::Receiver<Result<()>> {
4610 let id = self.id;
4611
4612 self.send_job(None, move |git_repo, _| async move {
4613 match git_repo {
4614 RepositoryState::Local(LocalRepositoryState {
4615 backend,
4616 environment,
4617 ..
4618 }) => backend.reset(commit, reset_mode, environment).await,
4619 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4620 client
4621 .request(proto::GitReset {
4622 project_id: project_id.0,
4623 repository_id: id.to_proto(),
4624 commit,
4625 mode: match reset_mode {
4626 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4627 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4628 },
4629 })
4630 .await?;
4631
4632 Ok(())
4633 }
4634 }
4635 })
4636 }
4637
4638 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4639 let id = self.id;
4640 self.send_job(None, move |git_repo, _cx| async move {
4641 match git_repo {
4642 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4643 backend.show(commit).await
4644 }
4645 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4646 let resp = client
4647 .request(proto::GitShow {
4648 project_id: project_id.0,
4649 repository_id: id.to_proto(),
4650 commit,
4651 })
4652 .await?;
4653
4654 Ok(CommitDetails {
4655 sha: resp.sha.into(),
4656 message: resp.message.into(),
4657 commit_timestamp: resp.commit_timestamp,
4658 author_email: resp.author_email.into(),
4659 author_name: resp.author_name.into(),
4660 })
4661 }
4662 }
4663 })
4664 }
4665
    /// Loads the full diff introduced by `commit`, including old and new
    /// file contents for each changed path.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    // Rebuild `CommitDiff` from the proto response; a bad
                    // path in any file fails the whole conversion.
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4701
4702 pub fn file_history(
4703 &mut self,
4704 path: RepoPath,
4705 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4706 self.file_history_paginated(path, 0, None)
4707 }
4708
    /// Loads a page of `path`'s commit history, skipping the first `skip`
    /// entries and returning at most `limit` entries (all of them when
    /// `limit` is `None`).
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    // Rebuild the history from proto entries. The path is
                    // echoed back by the host (it may differ from the input,
                    // e.g. after renames — TODO confirm).
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4750
4751 pub fn get_graph_data(
4752 &self,
4753 log_source: LogSource,
4754 log_order: LogOrder,
4755 ) -> Option<&InitialGitGraphData> {
4756 self.initial_graph_data.get(&(log_source, log_order))
4757 }
4758
4759 pub fn search_commits(
4760 &mut self,
4761 log_source: LogSource,
4762 search_args: SearchCommitArgs,
4763 request_tx: smol::channel::Sender<Oid>,
4764 cx: &mut Context<Self>,
4765 ) {
4766 let repository_state = self.repository_state.clone();
4767
4768 cx.background_spawn(async move {
4769 let repo_state = repository_state.await;
4770
4771 match repo_state {
4772 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4773 backend
4774 .search_commits(log_source, search_args, request_tx)
4775 .await
4776 .log_err();
4777 }
4778 Ok(RepositoryState::Remote(_)) => {}
4779 Err(_) => {}
4780 };
4781 })
4782 .detach();
4783 }
4784
    /// Returns the slice of loaded graph commits covering `range` for the
    /// given source/order pair, lazily starting a background fetch the
    /// first time a pair is requested.
    ///
    /// The returned range is clamped to the data loaded so far, so callers
    /// may receive fewer commits than requested while loading is underway.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // First request for this (source, order): spawn the fetch.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record a fetch failure on the cache entry so the UI
                    // can surface it via `GraphDataResponse::error`.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to what has been loaded so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4854
    /// Streams initial commit-graph data for a local repository into the
    /// cached `initial_graph_data` entry, emitting `CountUpdated` events as
    /// batches arrive.
    ///
    /// The backend produces batches on a background task and sends them
    /// through a channel; this function applies each batch on the main
    /// thread. Returns the backend task's error, if any, once the channel
    /// closes.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // Producer: walk the graph on the background executor.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Consumer: append each batch to the cache entry and keep the
        // oid -> index lookup in sync with the commit list.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                // The entry is created before this task starts; a vacant
                // entry means the cache was cleared without dropping us.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4908
    /// Returns the cached data for commit `sha`, requesting it from the
    /// commit-data handler when it isn't loaded yet.
    ///
    /// Returns `Loading` until the data arrives; callers are expected to
    /// call again after a notify. NOTE(review): when the handler is
    /// `Closed`, this call only opens it — the request for `sha` is not
    /// enqueued until a subsequent call finds the handler `Open`.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark as loading if the request was actually queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4929
    /// Spins up the commit-data handler: a background reader that serves
    /// commit lookups and a foreground task that stores the results.
    ///
    /// The background task shuts itself down after 10 seconds without a
    /// request (or when either channel closes); the foreground task then
    /// resets the handler state to `Closed` so a later `fetch_commit_data`
    /// can reopen it.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Bounded result channel applies backpressure to the reader;
        // requests are unbounded so `try_send` from the UI never blocks.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        // Foreground: store each loaded commit and notify observers.
        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Result channel closed: mark the handler closed so it can be
            // reopened on demand.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        // Background: resolve the repository backend, then serve lookup
        // requests until idle timeout or channel closure.
        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Fresh idle timer per iteration; a request resets it.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task finish
            // and flip the handler state back to `Closed`.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
5020
5021 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
5022 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
5023 }
5024
    /// Saves any open, dirty buffers corresponding to the given repo paths,
    /// returning one save task per buffer that needed saving.
    ///
    /// Buffers whose backing file no longer exists on disk are skipped, as
    /// are paths with no open buffer.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    // Save only buffers that are open, still exist on disk,
                    // and have unsaved edits.
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
5051
5052 pub fn stage_entries(
5053 &mut self,
5054 entries: Vec<RepoPath>,
5055 cx: &mut Context<Self>,
5056 ) -> Task<anyhow::Result<()>> {
5057 self.stage_or_unstage_entries(true, entries, cx)
5058 }
5059
5060 pub fn unstage_entries(
5061 &mut self,
5062 entries: Vec<RepoPath>,
5063 cx: &mut Context<Self>,
5064 ) -> Task<anyhow::Result<()>> {
5065 self.stage_or_unstage_entries(false, entries, cx)
5066 }
5067
    /// Stages (`git add`) or unstages (`git reset`) `entries`, first saving
    /// any dirty open buffers for those paths.
    ///
    /// Before the index write, each affected buffer's uncommitted diff has
    /// all of its hunks optimistically toggled, and the per-buffer
    /// hunk-staging operation count is recorded. After the write, on
    /// success the recorded count is committed as "as of write"; on failure
    /// the optimistic pending hunks are cleared so the UI rolls back.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line shown while the job runs.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        // Key the job by the affected paths — presumably so index writes to
        // the same entries are coalesced by the worker; confirm against the
        // worker implementation.
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Persist dirty buffers first so the index write sees the
                // on-disk contents the user expects.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically toggle every hunk in each open
                            // buffer's uncommitted diff and remember the
                            // resulting operation count per diff state.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write (local backend
                            // or proto request to the host).
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Commit or roll back the optimistic hunk state
                            // depending on whether the write succeeded.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5252
    /// Stages every entry that is not already fully staged (or already
    /// being staged by a pending operation).
    ///
    /// The filtering runs on a background thread over clones of the
    /// snapshot and pending-op map.
    pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let snapshot = self.snapshot.clone();
        let pending_ops = self.pending_ops.clone();
        let to_stage = cx.background_spawn(async move {
            snapshot
                .status()
                .filter_map(|entry| {
                    // A pending op takes precedence over the cached status:
                    // skip paths that are staging or already staged.
                    if let Some(ops) =
                        pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
                    {
                        if ops.staging() || ops.staged() {
                            None
                        } else {
                            Some(entry.repo_path)
                        }
                    } else if entry.status.staging().is_fully_staged() {
                        None
                    } else {
                        Some(entry.repo_path)
                    }
                })
                .collect()
        });

        cx.spawn(async move |this, cx| {
            let to_stage = to_stage.await;
            this.update(cx, |this, cx| {
                this.stage_or_unstage_entries(true, to_stage, cx)
            })?
            .await
        })
    }
5285
5286 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5287 let snapshot = self.snapshot.clone();
5288 let pending_ops = self.pending_ops.clone();
5289 let to_unstage = cx.background_spawn(async move {
5290 snapshot
5291 .status()
5292 .filter_map(|entry| {
5293 if let Some(ops) =
5294 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5295 {
5296 if !ops.staging() && !ops.staged() {
5297 None
5298 } else {
5299 Some(entry.repo_path)
5300 }
5301 } else if entry.status.staging().is_fully_unstaged() {
5302 None
5303 } else {
5304 Some(entry.repo_path)
5305 }
5306 })
5307 .collect()
5308 });
5309
5310 cx.spawn(async move |this, cx| {
5311 let to_unstage = to_unstage.await;
5312 this.update(cx, |this, cx| {
5313 this.stage_or_unstage_entries(false, to_unstage, cx)
5314 })?
5315 .await
5316 })
5317 }
5318
5319 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5320 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5321
5322 self.stash_entries(to_stash, cx)
5323 }
5324
    /// Stashes changes at the given paths, running the operation locally or
    /// forwarding it to the remote project host.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?`: the job-queue receiver; second `?`: the git result.
            .await??;
            Ok(())
        })
    }
5361
    /// Pops a stash entry. `index` selects a specific entry; `None` uses the
    /// backend's default (presumably the latest — backend-defined).
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?`: the job-queue receiver; second `?`: the git result.
            .await??;
            Ok(())
        })
    }
5395
    /// Applies a stash entry without removing it from the stash list.
    /// `index` selects a specific entry; `None` uses the backend's default.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?`: the job-queue receiver; second `?`: the git result.
            .await??;
            Ok(())
        })
    }
5429
5430 pub fn stash_drop(
5431 &mut self,
5432 index: Option<usize>,
5433 cx: &mut Context<Self>,
5434 ) -> oneshot::Receiver<anyhow::Result<()>> {
5435 let id = self.id;
5436 let updates_tx = self
5437 .git_store()
5438 .and_then(|git_store| match &git_store.read(cx).state {
5439 GitStoreState::Local { downstream, .. } => downstream
5440 .as_ref()
5441 .map(|downstream| downstream.updates_tx.clone()),
5442 _ => None,
5443 });
5444 let this = cx.weak_entity();
5445 self.send_job(None, move |git_repo, mut cx| async move {
5446 match git_repo {
5447 RepositoryState::Local(LocalRepositoryState {
5448 backend,
5449 environment,
5450 ..
5451 }) => {
5452 // TODO would be nice to not have to do this manually
5453 let result = backend.stash_drop(index, environment).await;
5454 if result.is_ok()
5455 && let Ok(stash_entries) = backend.stash_entries().await
5456 {
5457 let snapshot = this.update(&mut cx, |this, cx| {
5458 this.snapshot.stash_entries = stash_entries;
5459 cx.emit(RepositoryEvent::StashEntriesChanged);
5460 this.snapshot.clone()
5461 })?;
5462 if let Some(updates_tx) = updates_tx {
5463 updates_tx
5464 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5465 .ok();
5466 }
5467 }
5468
5469 result
5470 }
5471 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5472 client
5473 .request(proto::StashDrop {
5474 project_id: project_id.0,
5475 repository_id: id.to_proto(),
5476 stash_index: index.map(|i| i as u64),
5477 })
5478 .await
5479 .context("sending stash pop request")?;
5480 Ok(())
5481 }
5482 }
5483 })
5484 }
5485
    /// Runs the given git hook, locally or via the remote project host. The
    /// job is labeled "git hook <name>" in the status display.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5512
    /// Commits staged changes. The pre-commit hook is started first and its
    /// result is awaited inside the commit job, so a failing hook aborts the
    /// commit before it runs.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Kick off the pre-commit hook before queuing the commit job itself.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Propagate hook failure (or a dropped receiver) before committing.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate so the host can relay
                    // credential prompts back; always unregister on exit.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5568
    /// Fetches from a remote, locally or via the remote project host,
    /// returning the command's stdout/stderr.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate so the host can relay
                    // credential prompts back; always unregister on exit.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5610
5611 pub fn push(
5612 &mut self,
5613 branch: SharedString,
5614 remote_branch: SharedString,
5615 remote: SharedString,
5616 options: Option<PushOptions>,
5617 askpass: AskPassDelegate,
5618 cx: &mut Context<Self>,
5619 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5620 let askpass_delegates = self.askpass_delegates.clone();
5621 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5622 let id = self.id;
5623
5624 let args = options
5625 .map(|option| match option {
5626 PushOptions::SetUpstream => " --set-upstream",
5627 PushOptions::Force => " --force-with-lease",
5628 })
5629 .unwrap_or("");
5630
5631 let updates_tx = self
5632 .git_store()
5633 .and_then(|git_store| match &git_store.read(cx).state {
5634 GitStoreState::Local { downstream, .. } => downstream
5635 .as_ref()
5636 .map(|downstream| downstream.updates_tx.clone()),
5637 _ => None,
5638 });
5639
5640 let this = cx.weak_entity();
5641 self.send_job(
5642 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5643 move |git_repo, mut cx| async move {
5644 match git_repo {
5645 RepositoryState::Local(LocalRepositoryState {
5646 backend,
5647 environment,
5648 ..
5649 }) => {
5650 let result = backend
5651 .push(
5652 branch.to_string(),
5653 remote_branch.to_string(),
5654 remote.to_string(),
5655 options,
5656 askpass,
5657 environment.clone(),
5658 cx.clone(),
5659 )
5660 .await;
5661 // TODO would be nice to not have to do this manually
5662 if result.is_ok() {
5663 let branches = backend.branches().await?;
5664 let branch = branches.into_iter().find(|branch| branch.is_head);
5665 log::info!("head branch after scan is {branch:?}");
5666 let snapshot = this.update(&mut cx, |this, cx| {
5667 this.snapshot.branch = branch;
5668 cx.emit(RepositoryEvent::HeadChanged);
5669 this.snapshot.clone()
5670 })?;
5671 if let Some(updates_tx) = updates_tx {
5672 updates_tx
5673 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5674 .ok();
5675 }
5676 }
5677 result
5678 }
5679 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5680 askpass_delegates.lock().insert(askpass_id, askpass);
5681 let _defer = util::defer(|| {
5682 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5683 debug_assert!(askpass_delegate.is_some());
5684 });
5685 let response = client
5686 .request(proto::Push {
5687 project_id: project_id.0,
5688 repository_id: id.to_proto(),
5689 askpass_id,
5690 branch_name: branch.to_string(),
5691 remote_branch_name: remote_branch.to_string(),
5692 remote_name: remote.to_string(),
5693 options: options.map(|options| match options {
5694 PushOptions::Force => proto::push::PushOptions::Force,
5695 PushOptions::SetUpstream => {
5696 proto::push::PushOptions::SetUpstream
5697 }
5698 }
5699 as i32),
5700 })
5701 .await?;
5702
5703 Ok(RemoteCommandOutput {
5704 stdout: response.stdout,
5705 stderr: response.stderr,
5706 })
5707 }
5708 }
5709 },
5710 )
5711 }
5712
    /// Pulls from `remote` (optionally a specific `branch`), with `--rebase`
    /// when requested. The job label mirrors the equivalent git command line.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build the status label, e.g. "git pull --rebase origin main".
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate so the host can relay
                    // credential prompts back; always unregister on exit.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5777
    /// Writes `content` into the git index for `path` (or removes the entry
    /// when `None`). Keyed on `WriteIndex(path)` so writes to the same path
    /// are serialized. When `hunk_staging_operation_count` is provided, the
    /// buffer's diff state is updated afterward to record which staging
    /// operation this index write corresponds to.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the file's executable bit in the index;
                        // treat missing files or metadata errors as non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // NOTE(review): the executable bit is only consulted
                        // locally; the remote request does not carry it —
                        // confirm the host derives it on its own side.
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Record the operation count on the buffer's diff state so
                    // later diff recalculations know this write has landed.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5856
    /// Adds a new remote (`git remote add <name> <url>`), locally or via the
    /// remote project host.
    pub fn create_remote(
        &mut self,
        remote_name: String,
        remote_url: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git remote add {remote_name} {remote_url}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.create_remote(remote_name, remote_url).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitCreateRemote {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                remote_name,
                                remote_url,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5886
5887 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5888 let id = self.id;
5889 self.send_job(
5890 Some(format!("git remove remote {remote_name}").into()),
5891 move |repo, _cx| async move {
5892 match repo {
5893 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5894 backend.remove_remote(remote_name).await
5895 }
5896 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5897 client
5898 .request(proto::GitRemoveRemote {
5899 project_id: project_id.0,
5900 repository_id: id.to_proto(),
5901 remote_name,
5902 })
5903 .await?;
5904
5905 Ok(())
5906 }
5907 }
5908 },
5909 )
5910 }
5911
    /// Resolves the remotes relevant to `branch_name`: the branch's push or
    /// upstream remote when one is configured, otherwise all remotes.
    pub fn get_remotes(
        &mut self,
        branch_name: Option<String>,
        is_push: bool,
    ) -> oneshot::Receiver<Result<Vec<Remote>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    // Prefer the branch-specific remote when one exists.
                    let remote = if let Some(branch_name) = branch_name {
                        if is_push {
                            backend.get_push_remote(branch_name).await?
                        } else {
                            backend.get_branch_remote(branch_name).await?
                        }
                    } else {
                        None
                    };

                    match remote {
                        Some(remote) => Ok(vec![remote]),
                        None => backend.get_all_remotes().await,
                    }
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GetRemotes {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                            is_push,
                        })
                        .await?;

                    let remotes = response
                        .remotes
                        .into_iter()
                        .map(|remotes| Remote {
                            name: remotes.name.into(),
                        })
                        .collect();

                    Ok(remotes)
                }
            }
        })
    }
5959
    /// Lists the repository's branches, locally or via the remote project
    /// host.
    pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.branches().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetBranches {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|branch| proto_to_branch(&branch))
                        .collect();

                    Ok(branches)
                }
            }
        })
    }
5986
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree's working directory differs from the original
        // repository's path; the main checkout's does not.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5998
5999 pub fn path_for_new_linked_worktree(
6000 &self,
6001 branch_name: &str,
6002 worktree_directory_setting: &str,
6003 ) -> Result<PathBuf> {
6004 let original_repo = self.original_repo_abs_path.clone();
6005 let project_name = original_repo
6006 .file_name()
6007 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
6008 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
6009 Ok(directory.join(branch_name).join(project_name))
6010 }
6011
    /// Lists the repository's git worktrees, locally or via the remote
    /// project host.
    pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.worktrees().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetWorktrees {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let worktrees = response
                        .worktrees
                        .into_iter()
                        .map(|worktree| proto_to_worktree(&worktree))
                        .collect();

                    Ok(worktrees)
                }
            }
        })
    }
6038
    /// Creates a new git worktree at `path` for the given `target` (existing
    /// branch, new branch, or detached HEAD), locally or via the remote
    /// project host.
    pub fn create_worktree(
        &mut self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let job_description = match target.branch_name() {
            Some(branch_name) => format!("git worktree add: {branch_name}"),
            None => "git worktree add (detached)".to_string(),
        };
        self.send_job(Some(job_description.into()), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_worktree(target, path).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Flatten the target enum into the proto's field triple:
                    // (branch name, base commit, reuse-existing-branch flag).
                    let (name, commit, use_existing_branch) = match target {
                        CreateWorktreeTarget::ExistingBranch { branch_name } => {
                            (Some(branch_name), None, true)
                        }
                        CreateWorktreeTarget::NewBranch {
                            branch_name,
                            base_sha,
                        } => (Some(branch_name), base_sha, false),
                        CreateWorktreeTarget::Detached { base_sha } => (None, base_sha, false),
                    };

                    client
                        .request(proto::GitCreateWorktree {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            name: name.unwrap_or_default(),
                            directory: path.to_string_lossy().to_string(),
                            commit,
                            use_existing_branch,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6082
6083 pub fn create_worktree_detached(
6084 &mut self,
6085 path: PathBuf,
6086 commit: String,
6087 ) -> oneshot::Receiver<Result<()>> {
6088 self.create_worktree(
6089 CreateWorktreeTarget::Detached {
6090 base_sha: Some(commit),
6091 },
6092 path,
6093 )
6094 }
6095
    /// Returns the SHA that HEAD currently points to, or `None` when it
    /// cannot be resolved (e.g. an unborn branch — backend-defined).
    pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    Ok(backend.head_sha().await)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetHeadSha {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    Ok(response.sha)
                }
            }
        })
    }
6116
6117 pub fn update_ref(
6118 &mut self,
6119 ref_name: String,
6120 commit: String,
6121 ) -> oneshot::Receiver<Result<()>> {
6122 self.send_job(None, move |repo, _cx| async move {
6123 match repo {
6124 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6125 backend.update_ref(ref_name, commit).await
6126 }
6127 RepositoryState::Remote(_) => {
6128 anyhow::bail!("update_ref is not supported for remote repositories")
6129 }
6130 }
6131 })
6132 }
6133
6134 pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
6135 self.send_job(None, move |repo, _cx| async move {
6136 match repo {
6137 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6138 backend.delete_ref(ref_name).await
6139 }
6140 RepositoryState::Remote(_) => {
6141 anyhow::bail!("delete_ref is not supported for remote repositories")
6142 }
6143 }
6144 })
6145 }
6146
6147 pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
6148 self.send_job(None, move |repo, _cx| async move {
6149 match repo {
6150 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6151 backend.repair_worktrees().await
6152 }
6153 RepositoryState::Remote(_) => {
6154 anyhow::bail!("repair_worktrees is not supported for remote repositories")
6155 }
6156 }
6157 })
6158 }
6159
6160 pub fn create_archive_checkpoint(&mut self) -> oneshot::Receiver<Result<(String, String)>> {
6161 self.send_job(None, move |repo, _cx| async move {
6162 match repo {
6163 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6164 backend.create_archive_checkpoint().await
6165 }
6166 RepositoryState::Remote(_) => {
6167 anyhow::bail!(
6168 "create_archive_checkpoint is not supported for remote repositories"
6169 )
6170 }
6171 }
6172 })
6173 }
6174
    /// Restores a previously created archive checkpoint from its staged and
    /// unstaged SHAs. Only supported for local repositories.
    pub fn restore_archive_checkpoint(
        &mut self,
        staged_sha: String,
        unstaged_sha: String,
    ) -> oneshot::Receiver<Result<()>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend
                        .restore_archive_checkpoint(staged_sha, unstaged_sha)
                        .await
                }
                RepositoryState::Remote(_) => {
                    anyhow::bail!(
                        "restore_archive_checkpoint is not supported for remote repositories"
                    )
                }
            }
        })
    }
6195
    /// Removes the git worktree at `path` (`git worktree remove`), optionally
    /// forcing removal, locally or via the remote project host.
    pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree remove: {}", path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.remove_worktree(path, force).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRemoveWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_string_lossy().to_string(),
                                force,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6221
    /// Moves a git worktree from `old_path` to `new_path` (`git worktree
    /// move`), locally or via the remote project host.
    pub fn rename_worktree(
        &mut self,
        old_path: PathBuf,
        new_path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree move: {}", old_path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_worktree(old_path, new_path).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                old_path: old_path.to_string_lossy().to_string(),
                                new_path: new_path.to_string_lossy().to_string(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6251
    /// Resolves the repository's default branch, optionally including the
    /// remote name in the result (backend-defined formatting), returning
    /// `None` when no default branch can be determined.
    pub fn default_branch(
        &mut self,
        include_remote_name: bool,
    ) -> oneshot::Receiver<Result<Option<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.default_branch(include_remote_name).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GetDefaultBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    anyhow::Ok(response.branch.map(SharedString::from))
                }
            }
        })
    }
6275
    /// Computes a tree-level diff (per-path added/modified/deleted statuses)
    /// for the given `diff_type`, locally or via the remote project host.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Convert proto entries to `TreeDiffStatus`, logging and
                    // skipping any entry with a missing/unparseable oid or path
                    // rather than failing the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6335
    /// Produces a textual diff for the given `diff_type` (HEAD→index,
    /// HEAD→worktree, or against a merge base), locally or via the remote
    /// project host.
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Map the diff type onto the proto enum; only MergeBase
                    // carries an extra base ref.
                    let (proto_diff_type, merge_base_ref) = match &diff_type {
                        DiffType::HeadToIndex => {
                            (proto::git_diff::DiffType::HeadToIndex.into(), None)
                        }
                        DiffType::HeadToWorktree => {
                            (proto::git_diff::DiffType::HeadToWorktree.into(), None)
                        }
                        DiffType::MergeBase { base_ref } => (
                            proto::git_diff::DiffType::MergeBase.into(),
                            Some(base_ref.to_string()),
                        ),
                    };
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            diff_type: proto_diff_type,
                            merge_base_ref,
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
6370
    /// Creates a new branch (`git switch -c`), optionally from `base_branch`.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `base_branch` is not forwarded in the
                    // remote request, so the host will branch from its default
                    // base — confirm this is intended / the proto supports it.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6401
    /// Switches to an existing branch (`git switch <name>`), locally or via
    /// the remote project host.
    pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git switch {branch_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.change_branch(branch_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitChangeBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                branch_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6426
    /// Deletes a branch; `is_remote` selects a remote-tracking branch
    /// (labeled with `-dr`) rather than a local one (`-d`).
    pub fn delete_branch(
        &mut self,
        is_remote: bool,
        branch_name: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(
                format!(
                    "git branch {} {}",
                    if is_remote { "-dr" } else { "-d" },
                    branch_name
                )
                .into(),
            ),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(state) => {
                        state.backend.delete_branch(is_remote, branch_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitDeleteBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                is_remote,
                                branch_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6463
6464 pub fn rename_branch(
6465 &mut self,
6466 branch: String,
6467 new_name: String,
6468 ) -> oneshot::Receiver<Result<()>> {
6469 let id = self.id;
6470 self.send_job(
6471 Some(format!("git branch -m {branch} {new_name}").into()),
6472 move |repo, _cx| async move {
6473 match repo {
6474 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6475 backend.rename_branch(branch, new_name).await
6476 }
6477 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6478 client
6479 .request(proto::GitRenameBranch {
6480 project_id: project_id.0,
6481 repository_id: id.to_proto(),
6482 branch,
6483 new_name,
6484 })
6485 .await?;
6486
6487 Ok(())
6488 }
6489 }
6490 },
6491 )
6492 }
6493
6494 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6495 let id = self.id;
6496 self.send_job(None, move |repo, _cx| async move {
6497 match repo {
6498 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6499 backend.check_for_pushed_commit().await
6500 }
6501 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6502 let response = client
6503 .request(proto::CheckForPushedCommits {
6504 project_id: project_id.0,
6505 repository_id: id.to_proto(),
6506 })
6507 .await?;
6508
6509 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6510
6511 Ok(branches)
6512 }
6513 }
6514 })
6515 }
6516
6517 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6518 let id = self.id;
6519 self.send_job(None, move |repo, _cx| async move {
6520 match repo {
6521 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6522 backend.checkpoint().await
6523 }
6524 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6525 let response = client
6526 .request(proto::GitCreateCheckpoint {
6527 project_id: project_id.0,
6528 repository_id: id.to_proto(),
6529 })
6530 .await?;
6531
6532 Ok(GitRepositoryCheckpoint {
6533 commit_sha: Oid::from_bytes(&response.commit_sha)?,
6534 })
6535 }
6536 }
6537 })
6538 }
6539
6540 pub fn restore_checkpoint(
6541 &mut self,
6542 checkpoint: GitRepositoryCheckpoint,
6543 ) -> oneshot::Receiver<Result<()>> {
6544 let id = self.id;
6545 self.send_job(None, move |repo, _cx| async move {
6546 match repo {
6547 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6548 backend.restore_checkpoint(checkpoint).await
6549 }
6550 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6551 client
6552 .request(proto::GitRestoreCheckpoint {
6553 project_id: project_id.0,
6554 repository_id: id.to_proto(),
6555 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6556 })
6557 .await?;
6558 Ok(())
6559 }
6560 }
6561 })
6562 }
6563
    /// Applies a `proto::UpdateRepository` message from the upstream (host)
    /// side of a collaboration session to this downstream repository's
    /// snapshot, emitting a `RepositoryEvent` for each piece of state that
    /// actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch and HEAD commit: a single HeadChanged is emitted if either
        // differs from the current snapshot.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to deserialize are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Build one edit list combining removals and insertions, then apply it
        // in a single pass over the status tree. Paths/entries that fail to
        // deserialize are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only the final message in an update batch advances the scan id.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6647
6648 pub fn compare_checkpoints(
6649 &mut self,
6650 left: GitRepositoryCheckpoint,
6651 right: GitRepositoryCheckpoint,
6652 ) -> oneshot::Receiver<Result<bool>> {
6653 let id = self.id;
6654 self.send_job(None, move |repo, _cx| async move {
6655 match repo {
6656 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6657 backend.compare_checkpoints(left, right).await
6658 }
6659 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6660 let response = client
6661 .request(proto::GitCompareCheckpoints {
6662 project_id: project_id.0,
6663 repository_id: id.to_proto(),
6664 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6665 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6666 })
6667 .await?;
6668 Ok(response.equal)
6669 }
6670 }
6671 })
6672 }
6673
6674 pub fn diff_checkpoints(
6675 &mut self,
6676 base_checkpoint: GitRepositoryCheckpoint,
6677 target_checkpoint: GitRepositoryCheckpoint,
6678 ) -> oneshot::Receiver<Result<String>> {
6679 let id = self.id;
6680 self.send_job(None, move |repo, _cx| async move {
6681 match repo {
6682 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6683 backend
6684 .diff_checkpoints(base_checkpoint, target_checkpoint)
6685 .await
6686 }
6687 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6688 let response = client
6689 .request(proto::GitDiffCheckpoints {
6690 project_id: project_id.0,
6691 repository_id: id.to_proto(),
6692 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
6693 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
6694 })
6695 .await?;
6696 Ok(response.diff)
6697 }
6698 }
6699 })
6700 }
6701
    /// Prunes pending-op entries whose jobs are no longer running, keeping
    /// only in-flight ops, and emits `PendingOpsChanged` if anything changed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree keeping only ops whose job is still running;
        // paths with no running ops are dropped entirely.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *pre-prune* set of ops —
            // `self.pending_ops` is only replaced below. Confirm listeners
            // expect the old snapshot rather than the pruned one.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6727
    /// Schedules a full git state rescan as a keyed job, so multiple pending
    /// scan requests collapse into the most recent one.
    ///
    /// When `updates_tx` is provided, the freshly computed snapshot is
    /// forwarded on it for broadcasting downstream.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The entity may have been dropped while this job was queued.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans can only run against a local repository backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // NOTE(review): the result of this update is discarded; if the
                // entity vanished mid-scan, the pending-ops cleanup is silently
                // skipped — confirm that is intended.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6759
    /// Spawns the background worker loop that executes queued git jobs for a
    /// local repository, returning the channel used to enqueue jobs.
    ///
    /// Jobs run one at a time, in FIFO order, except that a keyed job is
    /// skipped when a newer job with the same key is already queued — so only
    /// the most recent job per key actually runs.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // processing any jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so the keyed-job
                // deduplication below can see all pending jobs.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a newer job with the same key is
                    // queued behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6805
    /// Spawns the background worker loop that executes queued git jobs for a
    /// remote (collaboration) repository, returning the job channel.
    ///
    /// The loop mirrors `spawn_local_git_worker`, minus backend
    /// initialization: FIFO execution, with keyed jobs skipped whenever a
    /// newer job with the same key is already queued.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so the keyed-job
                // deduplication below can see all pending jobs.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a newer job with the same key is
                    // queued behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6841
6842 fn load_staged_text(
6843 &mut self,
6844 buffer_id: BufferId,
6845 repo_path: RepoPath,
6846 cx: &App,
6847 ) -> Task<Result<Option<String>>> {
6848 let rx = self.send_job(None, move |state, _| async move {
6849 match state {
6850 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6851 anyhow::Ok(backend.load_index_text(repo_path).await)
6852 }
6853 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6854 let response = client
6855 .request(proto::OpenUnstagedDiff {
6856 project_id: project_id.to_proto(),
6857 buffer_id: buffer_id.to_proto(),
6858 })
6859 .await?;
6860 Ok(response.staged_text)
6861 }
6862 }
6863 });
6864 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6865 }
6866
    /// Loads the committed (HEAD) and staged (index) texts for `repo_path`
    /// and packages them as a `DiffBasesChange`: `SetBoth` when the index
    /// matches HEAD, otherwise `SetEach` with the two texts separately.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // Collapse to SetBoth when index == HEAD so callers can
                    // share a single base text.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The host performs the same index-vs-HEAD comparison and
                    // communicates the result via `mode`.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6912
6913 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6914 let repository_id = self.snapshot.id;
6915 let rx = self.send_job(None, move |state, _| async move {
6916 match state {
6917 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6918 backend.load_blob_content(oid).await
6919 }
6920 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6921 let response = client
6922 .request(proto::GetBlobContent {
6923 project_id: project_id.to_proto(),
6924 repository_id: repository_id.0,
6925 oid: oid.to_string(),
6926 })
6927 .await?;
6928 Ok(response.content)
6929 }
6930 }
6931 });
6932 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6933 }
6934
    /// Records `paths` as needing a status refresh and schedules a keyed
    /// refresh job. Repeated calls while a refresh is queued coalesce: the
    /// job drains all accumulated paths in one batch.
    ///
    /// The refresh recomputes file statuses and diff stats for the changed
    /// paths, updates `statuses_by_path` incrementally, refreshes stash
    /// entries, emits change events, and optionally forwards the new snapshot
    /// on `updates_tx`.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of all accumulated path batches so later
                // calls start a fresh accumulation.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten and dedupe the batches; BTreeSet also gives
                        // sorted order for the cursor seeks below.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        // Without a HEAD commit there is nothing to diff
                        // against, so substitute an empty diff stat.
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Insert entries whose status or diff stat differs
                        // from the previous snapshot; skip unchanged ones.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths we asked about but got no status back for have
                        // become clean: remove their old entries if present.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
7047
    /// Returns info (command label and start time) for a currently running
    /// git job, if any.
    ///
    /// NOTE(review): if several jobs are active this returns an arbitrary one
    /// (`values().next()`), not necessarily the oldest.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }
7052
7053 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
7054 self.send_job(None, |_, _| async {})
7055 }
7056
    /// Runs `f` while tracking a pending op (with `git_status`) for each of
    /// `paths`, then records the final job status on those ops.
    ///
    /// A cancellation (`Canceled`) is recorded as `Skipped` and reported as
    /// success; any other error is recorded as `Error` and propagated.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        // Register a Running op per path before the job starts.
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Look up each op we registered (by path, then by id) and write
            // the final status back into the pending-ops tree.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7096
7097 fn new_pending_ops_for_paths(
7098 &mut self,
7099 paths: Vec<RepoPath>,
7100 git_status: pending_op::GitStatus,
7101 ) -> Vec<(PendingOpId, RepoPath)> {
7102 let mut edits = Vec::with_capacity(paths.len());
7103 let mut ids = Vec::with_capacity(paths.len());
7104 for path in paths {
7105 let mut ops = self
7106 .pending_ops
7107 .get(&PathKey(path.as_ref().clone()), ())
7108 .cloned()
7109 .unwrap_or_else(|| PendingOps::new(&path));
7110 let id = ops.max_id() + 1;
7111 ops.ops.push(PendingOp {
7112 id,
7113 git_status,
7114 job_status: pending_op::JobStatus::Running,
7115 });
7116 edits.push(sum_tree::Edit::Insert(ops));
7117 ids.push((id, path));
7118 }
7119 self.pending_ops.edit(edits, ());
7120 ids
7121 }
7122 pub fn default_remote_url(&self) -> Option<String> {
7123 self.remote_upstream_url
7124 .clone()
7125 .or(self.remote_origin_url.clone())
7126 }
7127}
7128
7129/// If `path` is a git linked worktree checkout, resolves it to the main
7130/// repository's working directory path. Returns `None` if `path` is a normal
7131/// repository, not a git repo, or if resolution fails.
7132///
7133/// Resolution works by:
7134/// 1. Reading the `.git` file to get the `gitdir:` pointer
7135/// 2. Following that to the worktree-specific git directory
7136/// 3. Reading the `commondir` file to find the shared `.git` directory
7137/// 4. Deriving the main repo's working directory from the common dir
7138pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7139 let dot_git = path.join(".git");
7140 let metadata = fs.metadata(&dot_git).await.ok()??;
7141 if metadata.is_dir {
7142 return None; // Normal repo, not a linked worktree
7143 }
7144 // It's a .git file — parse the gitdir: pointer
7145 let content = fs.load(&dot_git).await.ok()?;
7146 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7147 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7148 // Read commondir to find the main .git directory
7149 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7150 let common_dir = fs
7151 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7152 .await
7153 .ok()?;
7154 Some(git::repository::original_repo_path_from_common_dir(
7155 &common_dir,
7156 ))
7157}
7158
7159/// Validates that the resolved worktree directory is acceptable:
7160/// - The setting must not be an absolute path.
7161/// - The resolved path must be either a subdirectory of the working
7162/// directory or a subdirectory of its parent (i.e., a sibling).
7163///
7164/// Returns `Ok(resolved_path)` or an error with a user-facing message.
pub fn worktrees_directory_for_repo(
    original_repo_abs_path: &Path,
    worktree_directory_setting: &str,
) -> Result<PathBuf> {
    // Check the original setting before trimming, since a path like "///"
    // is absolute but becomes "" after stripping trailing separators.
    // Also check for leading `/` or `\` explicitly, because on Windows
    // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
    // would slip through even though it's clearly not a relative path.
    if Path::new(worktree_directory_setting).is_absolute()
        || worktree_directory_setting.starts_with('/')
        || worktree_directory_setting.starts_with('\\')
    {
        anyhow::bail!(
            "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
        );
    }

    if worktree_directory_setting.is_empty() {
        anyhow::bail!("git.worktree_directory must not be empty");
    }

    // Trailing separators would make `file_name`/`join` behave oddly; a bare
    // ".." is rejected because it would resolve to the parent itself.
    let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
    if trimmed == ".." {
        anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
    }

    let joined = original_repo_abs_path.join(trimmed);
    let resolved = util::normalize_path(&joined);
    // When the setting escapes the repo directory (e.g. "../worktrees"),
    // append the repo's directory name — presumably to namespace worktrees
    // per repository; TODO confirm intent.
    let resolved = if resolved.starts_with(original_repo_abs_path) {
        resolved
    } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
        resolved.join(repo_dir_name)
    } else {
        resolved
    };

    let parent = original_repo_abs_path
        .parent()
        .unwrap_or(original_repo_abs_path);

    // Final containment check: the result must live under the repo directory
    // or under its parent (i.e. be a sibling of the repo).
    if !resolved.starts_with(parent) {
        anyhow::bail!(
            "git.worktree_directory resolved to {resolved:?}, which is outside \
            the project root and its parent directory. It must resolve to a \
            subdirectory of {original_repo_abs_path:?} or a sibling of it."
        );
    }

    Ok(resolved)
}
7216
7217/// Returns a short name for a linked worktree suitable for UI display
7218///
7219/// Uses the main worktree path to come up with a short name that disambiguates
7220/// the linked worktree from the main worktree.
7221pub fn linked_worktree_short_name(
7222 main_worktree_path: &Path,
7223 linked_worktree_path: &Path,
7224) -> Option<SharedString> {
7225 if main_worktree_path == linked_worktree_path {
7226 return None;
7227 }
7228
7229 let project_name = main_worktree_path.file_name()?.to_str()?;
7230 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7231 let name = if directory_name != project_name {
7232 directory_name.to_string()
7233 } else {
7234 linked_worktree_path
7235 .parent()?
7236 .file_name()?
7237 .to_str()?
7238 .to_string()
7239 };
7240 Some(name.into())
7241}
7242
/// Builds a web permalink for a file inside a cargo registry `src` checkout.
///
/// Walks up from `path` looking for `.cargo_vcs_info.json` (written by cargo
/// when packaging), reads the crate's `repository` field from the adjacent
/// `Cargo.toml`, and combines the recorded commit SHA, in-VCS path, and
/// `selection` line range into a hosting-provider permalink.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal mirror of the `.cargo_vcs_info.json` schema.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // `skip(1)` starts the search at the file's parent directory.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file path inside the repository using the recorded
    // in-VCS prefix; `strip_prefix(dir)` cannot fail since `dir` is an
    // ancestor of `path`.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7293
7294fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7295 let Some(blame) = blame else {
7296 return proto::BlameBufferResponse {
7297 blame_response: None,
7298 };
7299 };
7300
7301 let entries = blame
7302 .entries
7303 .into_iter()
7304 .map(|entry| proto::BlameEntry {
7305 sha: entry.sha.as_bytes().into(),
7306 start_line: entry.range.start,
7307 end_line: entry.range.end,
7308 original_line_number: entry.original_line_number,
7309 author: entry.author,
7310 author_mail: entry.author_mail,
7311 author_time: entry.author_time,
7312 author_tz: entry.author_tz,
7313 committer: entry.committer_name,
7314 committer_mail: entry.committer_email,
7315 committer_time: entry.committer_time,
7316 committer_tz: entry.committer_tz,
7317 summary: entry.summary,
7318 previous: entry.previous,
7319 filename: entry.filename,
7320 })
7321 .collect::<Vec<_>>();
7322
7323 let messages = blame
7324 .messages
7325 .into_iter()
7326 .map(|(oid, message)| proto::CommitMessage {
7327 oid: oid.as_bytes().into(),
7328 message,
7329 })
7330 .collect::<Vec<_>>();
7331
7332 proto::BlameBufferResponse {
7333 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7334 }
7335}
7336
7337fn deserialize_blame_buffer_response(
7338 response: proto::BlameBufferResponse,
7339) -> Option<git::blame::Blame> {
7340 let response = response.blame_response?;
7341 let entries = response
7342 .entries
7343 .into_iter()
7344 .filter_map(|entry| {
7345 Some(git::blame::BlameEntry {
7346 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7347 range: entry.start_line..entry.end_line,
7348 original_line_number: entry.original_line_number,
7349 committer_name: entry.committer,
7350 committer_time: entry.committer_time,
7351 committer_tz: entry.committer_tz,
7352 committer_email: entry.committer_mail,
7353 author: entry.author,
7354 author_mail: entry.author_mail,
7355 author_time: entry.author_time,
7356 author_tz: entry.author_tz,
7357 summary: entry.summary,
7358 previous: entry.previous,
7359 filename: entry.filename,
7360 })
7361 })
7362 .collect::<Vec<_>>();
7363
7364 let messages = response
7365 .messages
7366 .into_iter()
7367 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7368 .collect::<HashMap<_, _>>();
7369
7370 Some(Blame { entries, messages })
7371}
7372
7373fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7374 proto::Branch {
7375 is_head: branch.is_head,
7376 ref_name: branch.ref_name.to_string(),
7377 unix_timestamp: branch
7378 .most_recent_commit
7379 .as_ref()
7380 .map(|commit| commit.commit_timestamp as u64),
7381 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7382 ref_name: upstream.ref_name.to_string(),
7383 tracking: upstream
7384 .tracking
7385 .status()
7386 .map(|upstream| proto::UpstreamTracking {
7387 ahead: upstream.ahead as u64,
7388 behind: upstream.behind as u64,
7389 }),
7390 }),
7391 most_recent_commit: branch
7392 .most_recent_commit
7393 .as_ref()
7394 .map(|commit| proto::CommitSummary {
7395 sha: commit.sha.to_string(),
7396 subject: commit.subject.to_string(),
7397 commit_timestamp: commit.commit_timestamp,
7398 author_name: commit.author_name.to_string(),
7399 }),
7400 }
7401}
7402
7403fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7404 proto::Worktree {
7405 path: worktree.path.to_string_lossy().to_string(),
7406 ref_name: worktree
7407 .ref_name
7408 .as_ref()
7409 .map(|s| s.to_string())
7410 .unwrap_or_default(),
7411 sha: worktree.sha.to_string(),
7412 is_main: worktree.is_main,
7413 }
7414}
7415
7416fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7417 git::repository::Worktree {
7418 path: PathBuf::from(proto.path.clone()),
7419 ref_name: Some(SharedString::from(&proto.ref_name)),
7420 sha: proto.sha.clone().into(),
7421 is_main: proto.is_main,
7422 }
7423}
7424
7425fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7426 git::repository::Branch {
7427 is_head: proto.is_head,
7428 ref_name: proto.ref_name.clone().into(),
7429 upstream: proto
7430 .upstream
7431 .as_ref()
7432 .map(|upstream| git::repository::Upstream {
7433 ref_name: upstream.ref_name.to_string().into(),
7434 tracking: upstream
7435 .tracking
7436 .as_ref()
7437 .map(|tracking| {
7438 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7439 ahead: tracking.ahead as u32,
7440 behind: tracking.behind as u32,
7441 })
7442 })
7443 .unwrap_or(git::repository::UpstreamTracking::Gone),
7444 }),
7445 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7446 git::repository::CommitSummary {
7447 sha: commit.sha.to_string().into(),
7448 subject: commit.subject.to_string().into(),
7449 commit_timestamp: commit.commit_timestamp,
7450 author_name: commit.author_name.to_string().into(),
7451 has_parent: true,
7452 }
7453 }),
7454 }
7455}
7456
7457fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7458 proto::GitCommitDetails {
7459 sha: commit.sha.to_string(),
7460 message: commit.message.to_string(),
7461 commit_timestamp: commit.commit_timestamp,
7462 author_email: commit.author_email.to_string(),
7463 author_name: commit.author_name.to_string(),
7464 }
7465}
7466
7467fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7468 CommitDetails {
7469 sha: proto.sha.clone().into(),
7470 message: proto.message.clone().into(),
7471 commit_timestamp: proto.commit_timestamp,
7472 author_email: proto.author_email.clone().into(),
7473 author_name: proto.author_name.clone().into(),
7474 }
7475}
7476
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
///
/// Emits `HeadChanged` / `BranchListChanged` / `GitWorktreeListChanged` after
/// the first (cheap) phase, and `StatusesChanged` / `StashEntriesChanged`
/// after the second (status/diff/stash) phase. Returns the final snapshot.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Grab what we need from the entity up front; the pending status-update
    // queue is cleared because this full recompute supersedes it.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD's commit details, if a HEAD exists. A failure to `show`
    // the commit is logged and treated as "no head commit".
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Phase 1: branch list, head commit, and linked worktrees, all fetched
    // concurrently on the background executor.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // Exclude the main working directory itself from the linked-worktree list.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    // Remote URL lookups are best-effort: failures surface as `None`, so the
    // joined pair is wrapped in `Ok` rather than propagated as an error.
    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // Publish the phase-1 snapshot and emit change events so the UI can react
    // before the (slower) status computation below completes.
    let snapshot = this.update(cx, |this, cx| {
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            // Bump the scan id so downstream consumers see a fresh revision.
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Phase 2: file statuses, diff stats, and stash entries, again fetched
    // concurrently in the background. Diff stats require a HEAD commit to
    // diff against; otherwise an empty stat set is used.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    // "." scopes the status query to the whole work directory.
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Index diff stats by path so each status entry can be annotated in O(1).
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    // Build the status SumTree, collecting conflicted paths along the way for
    // the merge-details update below.
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Refresh merge/conflict bookkeeping against the newly-observed conflicts.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Publish the phase-2 results, emitting events only when something
    // actually changed, and bump the scan id once more.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7646
7647fn status_from_proto(
7648 simple_status: i32,
7649 status: Option<proto::GitFileStatus>,
7650) -> anyhow::Result<FileStatus> {
7651 use proto::git_file_status::Variant;
7652
7653 let Some(variant) = status.and_then(|status| status.variant) else {
7654 let code = proto::GitStatus::from_i32(simple_status)
7655 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7656 let result = match code {
7657 proto::GitStatus::Added => TrackedStatus {
7658 worktree_status: StatusCode::Added,
7659 index_status: StatusCode::Unmodified,
7660 }
7661 .into(),
7662 proto::GitStatus::Modified => TrackedStatus {
7663 worktree_status: StatusCode::Modified,
7664 index_status: StatusCode::Unmodified,
7665 }
7666 .into(),
7667 proto::GitStatus::Conflict => UnmergedStatus {
7668 first_head: UnmergedStatusCode::Updated,
7669 second_head: UnmergedStatusCode::Updated,
7670 }
7671 .into(),
7672 proto::GitStatus::Deleted => TrackedStatus {
7673 worktree_status: StatusCode::Deleted,
7674 index_status: StatusCode::Unmodified,
7675 }
7676 .into(),
7677 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7678 };
7679 return Ok(result);
7680 };
7681
7682 let result = match variant {
7683 Variant::Untracked(_) => FileStatus::Untracked,
7684 Variant::Ignored(_) => FileStatus::Ignored,
7685 Variant::Unmerged(unmerged) => {
7686 let [first_head, second_head] =
7687 [unmerged.first_head, unmerged.second_head].map(|head| {
7688 let code = proto::GitStatus::from_i32(head)
7689 .with_context(|| format!("Invalid git status code: {head}"))?;
7690 let result = match code {
7691 proto::GitStatus::Added => UnmergedStatusCode::Added,
7692 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7693 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7694 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7695 };
7696 Ok(result)
7697 });
7698 let [first_head, second_head] = [first_head?, second_head?];
7699 UnmergedStatus {
7700 first_head,
7701 second_head,
7702 }
7703 .into()
7704 }
7705 Variant::Tracked(tracked) => {
7706 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7707 .map(|status| {
7708 let code = proto::GitStatus::from_i32(status)
7709 .with_context(|| format!("Invalid git status code: {status}"))?;
7710 let result = match code {
7711 proto::GitStatus::Modified => StatusCode::Modified,
7712 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7713 proto::GitStatus::Added => StatusCode::Added,
7714 proto::GitStatus::Deleted => StatusCode::Deleted,
7715 proto::GitStatus::Renamed => StatusCode::Renamed,
7716 proto::GitStatus::Copied => StatusCode::Copied,
7717 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7718 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7719 };
7720 Ok(result)
7721 });
7722 let [index_status, worktree_status] = [index_status?, worktree_status?];
7723 TrackedStatus {
7724 index_status,
7725 worktree_status,
7726 }
7727 .into()
7728 }
7729 };
7730 Ok(result)
7731}
7732
7733fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7734 use proto::git_file_status::{Tracked, Unmerged, Variant};
7735
7736 let variant = match status {
7737 FileStatus::Untracked => Variant::Untracked(Default::default()),
7738 FileStatus::Ignored => Variant::Ignored(Default::default()),
7739 FileStatus::Unmerged(UnmergedStatus {
7740 first_head,
7741 second_head,
7742 }) => Variant::Unmerged(Unmerged {
7743 first_head: unmerged_status_to_proto(first_head),
7744 second_head: unmerged_status_to_proto(second_head),
7745 }),
7746 FileStatus::Tracked(TrackedStatus {
7747 index_status,
7748 worktree_status,
7749 }) => Variant::Tracked(Tracked {
7750 index_status: tracked_status_to_proto(index_status),
7751 worktree_status: tracked_status_to_proto(worktree_status),
7752 }),
7753 };
7754 proto::GitFileStatus {
7755 variant: Some(variant),
7756 }
7757}
7758
7759fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7760 match code {
7761 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7762 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7763 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7764 }
7765}
7766
7767fn tracked_status_to_proto(code: StatusCode) -> i32 {
7768 match code {
7769 StatusCode::Added => proto::GitStatus::Added as _,
7770 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7771 StatusCode::Modified => proto::GitStatus::Modified as _,
7772 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7773 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7774 StatusCode::Copied => proto::GitStatus::Copied as _,
7775 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7776 }
7777}