1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, CreateWorktreeTarget,
36 DiffType, FetchOptions, GitRepository, GitRepositoryCheckpoint, GraphCommitData,
37 InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote, RemoteCommandOutput,
38 RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Tracks every git repository in a project, plus per-buffer diff and
/// conflict state, and fans repository updates out to remote collaborators.
pub struct GitStore {
    /// Local vs. remote backing for git operations (see `GitStoreState`).
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All repositories known to this store, keyed by store-assigned id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // NOTE(review): presumably the worktrees associated with each
    // repository — confirm against usage elsewhere in this file.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository currently considered "active", if any.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff-loading tasks; `Shared` lets concurrent requests for
    /// the same (buffer, kind) pair await a single task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state: diffs, cached base texts, conflict sets.
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diffs that have been shared with individual remote peers.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Diffs already sent to a single remote peer for one buffer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}

/// Per-buffer git state: the diffs that exist for the buffer, cached base
/// texts, conflict-marker parsing, and in-flight recalculation tasks.
struct BufferGitState {
    /// Diff of the buffer against the index (weak handle; the store does
    /// not keep the diff alive by itself).
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    /// Diff of the buffer against HEAD.
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs against arbitrary commits; a `None` key is a diff with no
    /// base text (see `open_diff_since`).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders notified once the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    /// Watch flag that observers use to wait for diff recalculation.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    /// Cached base texts for oid diffs, keyed by commit oid.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed when the diff bases are next applied.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed, and their
/// new contents (`None` meaning the file has no text at that base).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index text changed.
    SetIndex(Option<String>),
    /// Only the HEAD text changed.
    SetHead(Option<String>),
    /// Index and HEAD both changed, to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD both changed, to the same value.
    SetBoth(Option<String>),
}

/// Identifies which kind of diff is being loaded for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer vs. the git index.
    Unstaged,
    /// Buffer vs. HEAD.
    Uncommitted,
    /// Buffer vs. an arbitrary commit (`None` means no base text).
    SinceOid(Option<git::Oid>),
}
162
/// Backing state for a `GitStore`: either repositories on the local
/// filesystem, or a proxy to an upstream collaborator's store.
enum GitStoreState {
    Local {
        /// Source of ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this local store is shared with downstream clients.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this remote store is re-shared further downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A repository change queued for forwarding to downstream clients.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Connection state while a local store is shared downstream.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    /// Queue drained by `_task`, which diffs successive snapshots and
    /// sends only the changes over the wire (see `GitStore::shared`).
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}

/// A checkpoint spanning all repositories in the store, keyed by each
/// repository's working-directory absolute path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// One file's git status within a repository, plus an optional
/// added/deleted line-count statistic.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summarizes this entry for the status tree: the entry's path plus the
    /// rolled-up git status summary.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed (and therefore ordered) by repo-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Store-assigned identifier for a repository.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// Details of an in-progress merge, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    /// For each conflicted path, the merge heads involved.
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    /// The pending merge message, if one exists.
    pub message: Option<SharedString>,
}

/// Commit data for the git graph, which may still be loading.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// Immutable view of a repository's state at one scan: file statuses,
/// branches, head commit, merge state, stash, and linked git worktrees.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Per-file statuses, ordered by repository-relative path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    /// The current branch, if any.
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Linked git worktrees of this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
299
/// Identifier for a queued git job.
type JobId = u64;

/// Metadata about a running git job, for progress display.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Background task that serves on-demand commit-data requests for the git
/// graph; oids are fed to it through `commit_data_request`.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph commit-data handler.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Incrementally fetched commit data for one git-graph view.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index into `commit_data` for each commit oid.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of graph data returned to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
331
/// A single git repository tracked by the store: its latest snapshot plus
/// job queueing, askpass handling, and cached graph/commit data.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue of git jobs for this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Jobs currently running, keyed by their id.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    job_id: JobId,
    /// Askpass delegates for in-flight credential prompts, keyed by id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Lazily resolved backend (local git backend or remote proxy).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    /// Graph data per (log source, log order) combination.
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}

/// A repository dereferences to its latest snapshot, so snapshot fields
/// and methods can be used directly on a `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
359
/// Everything needed to run git operations against a local repository.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    /// Backend implementing the actual git operations.
    pub backend: Arc<dyn GitRepository>,
    /// Environment variables resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
366
impl LocalRepositoryState {
    /// Opens a local repository backend for the given working directory.
    ///
    /// Resolves the directory's shell environment first (so the captured
    /// `PATH` is honored when locating the `git` binary), then opens the
    /// repository on a background thread and applies the trust flag.
    ///
    /// A missing environment is logged and treated as empty rather than
    /// fatal; failure to open the repository is an error.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found via the captured PATH; fall back
                    // to whatever is on the process's own PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
409
/// Connection details for proxying git operations to an upstream peer.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// Where git operations for a repository are actually executed.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events for loading the git graph.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of loaded commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}

/// Events emitted by a `Repository` when parts of its snapshot change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Marker event emitted when a repository's job set changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the `GitStore` itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of git work, run once the repository's backend state resolves.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): `key` appears to identify equivalent jobs (see
    // `GitJobKey` variants) — confirm dedupe/serialization semantics at
    // the job-queue site.
    key: Option<GitJobKey>,
}

/// Keys identifying classes of equivalent git jobs.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
471
472impl GitStore {
473 pub fn local(
474 worktree_store: &Entity<WorktreeStore>,
475 buffer_store: Entity<BufferStore>,
476 environment: Entity<ProjectEnvironment>,
477 fs: Arc<dyn Fs>,
478 cx: &mut Context<Self>,
479 ) -> Self {
480 Self::new(
481 worktree_store.clone(),
482 buffer_store,
483 GitStoreState::Local {
484 next_repository_id: Arc::new(AtomicU64::new(1)),
485 downstream: None,
486 project_environment: environment,
487 fs,
488 },
489 cx,
490 )
491 }
492
493 pub fn remote(
494 worktree_store: &Entity<WorktreeStore>,
495 buffer_store: Entity<BufferStore>,
496 upstream_client: AnyProtoClient,
497 project_id: u64,
498 cx: &mut Context<Self>,
499 ) -> Self {
500 Self::new(
501 worktree_store.clone(),
502 buffer_store,
503 GitStoreState::Remote {
504 upstream_client,
505 upstream_project_id: project_id,
506 downstream: None,
507 },
508 cx,
509 )
510 }
511
    /// Shared constructor: subscribes to the worktree and buffer stores
    /// (and the trusted-worktrees global, when present) and builds an empty
    /// store around the given backing `state`.
    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        let mut _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        // The trusted-worktrees global may not exist in every context.
        if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
            _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
        }

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            worktree_ids: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }
540
    /// Registers all git-related RPC handlers on the given client, so that
    /// remote peers can drive this store's operations.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_create_archive_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_archive_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
        client.add_entity_request_handler(Self::handle_edit_ref);
        client.add_entity_request_handler(Self::handle_repair_worktrees);
    }
597
    /// Whether this store runs git operations on the local filesystem (as
    /// opposed to proxying them to an upstream collaborator).
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
601
602 fn set_active_repo_id(&mut self, repo_id: RepositoryId, cx: &mut Context<Self>) {
603 if self.active_repo_id != Some(repo_id) {
604 self.active_repo_id = Some(repo_id);
605 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
606 }
607 }
608
609 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
610 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
611 self.set_active_repo_id(repo.read(cx).id, cx);
612 }
613 }
614
615 pub fn set_active_repo_for_worktree(
616 &mut self,
617 worktree_id: WorktreeId,
618 cx: &mut Context<Self>,
619 ) {
620 let Some(worktree) = self
621 .worktree_store
622 .read(cx)
623 .worktree_for_id(worktree_id, cx)
624 else {
625 return;
626 };
627 let worktree_abs_path = worktree.read(cx).abs_path();
628 let Some(repo_id) = self
629 .repositories
630 .values()
631 .filter(|repo| {
632 let repo_path = &repo.read(cx).work_directory_abs_path;
633 *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref())
634 })
635 .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
636 .map(|repo| repo.read(cx).id)
637 else {
638 return;
639 };
640
641 self.set_active_repo_id(repo_id, cx);
642 }
643
    /// Begins sharing this store's repositories with a downstream client.
    ///
    /// For a remote store, sends an initial update for every repository and
    /// records the downstream client. For a local store, spawns a background
    /// task that drains queued `DownstreamUpdate`s, diffing each new
    /// snapshot against the last one sent so only changes go over the wire.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // Snapshots previously sent downstream, used for diffing.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the queue with the current state of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Already-known repository: send only
                                            // the delta since the last snapshot.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send a full update.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The send loop has ended (sender dropped or send
                        // failed): clear the downstream state.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
724
725 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
726 match &mut self.state {
727 GitStoreState::Local {
728 downstream: downstream_client,
729 ..
730 } => {
731 downstream_client.take();
732 }
733 GitStoreState::Remote {
734 downstream: downstream_client,
735 ..
736 } => {
737 downstream_client.take();
738 }
739 }
740 self.shared_diffs.clear();
741 }
742
    /// Drops all shared-diff bookkeeping for a peer (e.g. on disconnect).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
746
747 pub fn active_repository(&self) -> Option<Entity<Repository>> {
748 self.active_repo_id
749 .as_ref()
750 .map(|id| self.repositories[id].clone())
751 }
752
    /// Opens (or returns the cached) diff of `buffer` against the git index.
    ///
    /// If a live unstaged diff already exists, it is returned after any
    /// in-progress recalculation finishes. Otherwise the index text is
    /// loaded and the diff is built via `open_diff_internal`; concurrent
    /// callers share one loading task through `loading_diffs`.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // Reuse the existing diff, waiting out any recalculation first.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
807
    /// Opens (or returns the cached) diff of `buffer` against an arbitrary
    /// commit; `oid: None` produces a diff with no base text.
    ///
    /// Concurrent requests for the same (buffer, oid) pair share a single
    /// loading task. The resulting diff uses the buffer's unstaged diff as
    /// its secondary diff.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: reuse a live diff, waiting out any recalculation.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        let diff_kind = DiffKind::SinceOid(oid);
        // Join an in-flight load for the same diff, if one exists.
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text for the requested commit, if any.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff becomes this diff's secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and register the diff weakly.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
908
    /// Opens (or returns the cached) diff of `buffer` against HEAD.
    ///
    /// Mirrors `open_unstaged_diff`: a live diff is reused after any
    /// pending recalculation, and concurrent callers share one loading
    /// task through `loading_diffs`.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
961
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`:
    /// installs the freshly loaded base text(s) into the buffer's git
    /// state, creates the `BufferDiff` entity, and resolves once the first
    /// recalculation (if any) has completed.
    ///
    /// On error, the `loading_diffs` entry is cleared so a later call can
    /// retry the load.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the loading entry before propagating the error.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => {
                        diff_state.unstaged_diff.get_or_insert(diff.downgrade());
                    }
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                // Resolve only after the initial recalculation completes.
                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1038
1039 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1040 let diff_state = self.diffs.get(&buffer_id)?;
1041 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1042 }
1043
1044 pub fn get_uncommitted_diff(
1045 &self,
1046 buffer_id: BufferId,
1047 cx: &App,
1048 ) -> Option<Entity<BufferDiff>> {
1049 let diff_state = self.diffs.get(&buffer_id)?;
1050 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1051 }
1052
1053 pub fn get_diff_since_oid(
1054 &self,
1055 buffer_id: BufferId,
1056 oid: Option<git::Oid>,
1057 cx: &App,
1058 ) -> Option<Entity<BufferDiff>> {
1059 let diff_state = self.diffs.get(&buffer_id)?;
1060 diff_state.read(cx).oid_diff(oid)
1061 }
1062
1063 pub fn open_conflict_set(
1064 &mut self,
1065 buffer: Entity<Buffer>,
1066 cx: &mut Context<Self>,
1067 ) -> Entity<ConflictSet> {
1068 log::debug!("open conflict set");
1069 let buffer_id = buffer.read(cx).remote_id();
1070
1071 if let Some(git_state) = self.diffs.get(&buffer_id)
1072 && let Some(conflict_set) = git_state
1073 .read(cx)
1074 .conflict_set
1075 .as_ref()
1076 .and_then(|weak| weak.upgrade())
1077 {
1078 let conflict_set = conflict_set;
1079 let buffer_snapshot = buffer.read(cx).text_snapshot();
1080
1081 git_state.update(cx, |state, cx| {
1082 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1083 });
1084
1085 return conflict_set;
1086 }
1087
1088 let is_unmerged = self
1089 .repository_and_path_for_buffer_id(buffer_id, cx)
1090 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1091 let git_store = cx.weak_entity();
1092 let buffer_git_state = self
1093 .diffs
1094 .entry(buffer_id)
1095 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1096 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1097
1098 self._subscriptions
1099 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1100 cx.emit(GitStoreEvent::ConflictsUpdated);
1101 }));
1102
1103 buffer_git_state.update(cx, |state, cx| {
1104 state.conflict_set = Some(conflict_set.downgrade());
1105 let buffer_snapshot = buffer.read(cx).text_snapshot();
1106 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1107 });
1108
1109 conflict_set
1110 }
1111
1112 pub fn project_path_git_status(
1113 &self,
1114 project_path: &ProjectPath,
1115 cx: &App,
1116 ) -> Option<FileStatus> {
1117 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1118 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1119 }
1120
    /// Captures a checkpoint of every repository in the store.
    ///
    /// Returns a `GitStoreCheckpoint` mapping each repository's working
    /// directory to its individual checkpoint. The per-repository checkpoints
    /// are awaited together on the background executor, failing fast if any
    /// one of them fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` yields a nested result; the `?` inside the
                // closure flattens it to a single `Result` per repository.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Paths and checkpoints were pushed in lock-step above, so
                // zipping them pairs each checkpoint with its work directory.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1141
1142 pub fn restore_checkpoint(
1143 &self,
1144 checkpoint: GitStoreCheckpoint,
1145 cx: &mut App,
1146 ) -> Task<Result<()>> {
1147 let repositories_by_work_dir_abs_path = self
1148 .repositories
1149 .values()
1150 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1151 .collect::<HashMap<_, _>>();
1152
1153 let mut tasks = Vec::new();
1154 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1155 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1156 let restore = repository.update(cx, |repository, _| {
1157 repository.restore_checkpoint(checkpoint)
1158 });
1159 tasks.push(async move { restore.await? });
1160 }
1161 }
1162 cx.background_spawn(async move {
1163 future::try_join_all(tasks).await?;
1164 Ok(())
1165 })
1166 }
1167
1168 /// Compares two checkpoints, returning true if they are equal.
1169 pub fn compare_checkpoints(
1170 &self,
1171 left: GitStoreCheckpoint,
1172 mut right: GitStoreCheckpoint,
1173 cx: &mut App,
1174 ) -> Task<Result<bool>> {
1175 let repositories_by_work_dir_abs_path = self
1176 .repositories
1177 .values()
1178 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1179 .collect::<HashMap<_, _>>();
1180
1181 let mut tasks = Vec::new();
1182 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1183 if let Some(right_checkpoint) = right
1184 .checkpoints_by_work_dir_abs_path
1185 .remove(&work_dir_abs_path)
1186 {
1187 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1188 {
1189 let compare = repository.update(cx, |repository, _| {
1190 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1191 });
1192
1193 tasks.push(async move { compare.await? });
1194 }
1195 } else {
1196 return Task::ready(Ok(false));
1197 }
1198 }
1199 cx.background_spawn(async move {
1200 Ok(future::try_join_all(tasks)
1201 .await?
1202 .into_iter()
1203 .all(|result| result))
1204 })
1205 }
1206
    /// Blames a buffer.
    ///
    /// When `version` is `None`, the buffer's current contents are blamed;
    /// otherwise the text as of that version. Local repositories run the
    /// blame through the git backend; remote repositories proxy the request
    /// to the upstream project. Fails if the buffer has no associated
    /// repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot everything we need from the buffer up front, since the
        // rest of the work happens asynchronously.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold the repository weakly so the spawned task doesn't keep it alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1253
1254 pub fn file_history(
1255 &self,
1256 repo: &Entity<Repository>,
1257 path: RepoPath,
1258 cx: &mut App,
1259 ) -> Task<Result<git::repository::FileHistory>> {
1260 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1261
1262 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1263 }
1264
1265 pub fn file_history_paginated(
1266 &self,
1267 repo: &Entity<Repository>,
1268 path: RepoPath,
1269 skip: usize,
1270 limit: Option<usize>,
1271 cx: &mut App,
1272 ) -> Task<Result<git::repository::FileHistory>> {
1273 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1274
1275 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1276 }
1277
    /// Builds a web permalink (e.g. to GitHub/GitLab) for the given line
    /// range of `buffer`.
    ///
    /// Uses the buffer's repository when one exists; falls back to a
    /// Cargo-registry-based permalink for Rust sources outside any repo.
    /// The remote is taken from the current branch's upstream, defaulting to
    /// "origin".
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the upstream remote of the current branch; fall back to
        // "origin" when there is no upstream.
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Local repo: resolve remote URL + HEAD SHA and build the
                    // permalink via the matching hosting provider.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Remote repo: ask the host project to build the link.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1362
1363 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1364 match &self.state {
1365 GitStoreState::Local {
1366 downstream: downstream_client,
1367 ..
1368 } => downstream_client
1369 .as_ref()
1370 .map(|state| (state.client.clone(), state.project_id)),
1371 GitStoreState::Remote {
1372 downstream: downstream_client,
1373 ..
1374 } => downstream_client.clone(),
1375 }
1376 }
1377
1378 fn upstream_client(&self) -> Option<AnyProtoClient> {
1379 match &self.state {
1380 GitStoreState::Local { .. } => None,
1381 GitStoreState::Remote {
1382 upstream_client, ..
1383 } => Some(upstream_client.clone()),
1384 }
1385 }
1386
    /// Keeps the repository set in sync with worktree changes.
    ///
    /// Only meaningful for the local (host-side) store; remote stores return
    /// immediately. Handles entry updates (notify repositories of changed
    /// paths), git-repository updates (add/rescan repositories), and worktree
    /// removal (drop repositories no longer covered by any worktree).
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by containing repository, then
                    // notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            // NOTE(review): the result of `update` is
                            // discarded — a dropped repository entity is
                            // skipped silently; confirm that's intended.
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees (e.g. single-file worktrees) don't get
                // repositories of their own.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Drop the removed worktree from every repository's coverage
                // set; repositories left with no worktrees are removed.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell any downstream collaborator the repository is gone.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining one (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Responds to a single repository's change event: refreshes conflict
    /// tracking for every open buffer belonging to that repository, then
    /// re-emits the event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Update the "has conflict" bit from the fresh
                        // snapshot; `set_has_conflict` reports whether it
                        // actually changed.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        if conflict_status_changed {
                            // Re-scan the buffer text so conflict regions
                            // match the new status.
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1532
    /// Re-emits a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1536
    /// Update our list of repositories and schedule git scans in response to
    /// a notification from a worktree.
    ///
    /// For each reported change: a known repository whose work directory
    /// moved is re-pointed and re-scanned; one that lost its work directory
    /// is detached from this worktree (and removed once no worktree covers
    /// it); a previously unknown repository is created, registered, and
    /// scanned.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update against an existing repository by either its
            // old or new working-directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still exists: record that this worktree
                    // covers it, update its path, and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Repository disappeared from this worktree; drop it
                    // entirely once no worktree references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // Brand-new repository: resolve its original (main) repo path
                // (differs from the work dir for linked git worktrees).
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                // NOTE(review): `Release` ordering on a plain ID counter —
                // `Relaxed` would normally suffice; confirm whether any
                // cross-thread ordering actually depends on this.
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // First repository added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1649
1650 fn on_trusted_worktrees_event(
1651 &mut self,
1652 _: Entity<TrustedWorktreesStore>,
1653 event: &TrustedWorktreesEvent,
1654 cx: &mut Context<Self>,
1655 ) {
1656 if !matches!(self.state, GitStoreState::Local { .. }) {
1657 return;
1658 }
1659
1660 let (is_trusted, event_paths) = match event {
1661 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1662 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1663 };
1664
1665 for (repo_id, worktree_ids) in &self.worktree_ids {
1666 if worktree_ids
1667 .iter()
1668 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1669 {
1670 if let Some(repo) = self.repositories.get(repo_id) {
1671 let repository_state = repo.read(cx).repository_state.clone();
1672 cx.background_spawn(async move {
1673 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1674 state.backend.set_trusted(is_trusted);
1675 }
1676 })
1677 .detach();
1678 }
1679 }
1680 }
1681 }
1682
    /// Keeps per-buffer git state in sync with buffer-store lifecycle events:
    /// language changes, buffer drops, shared-buffer closure, and file-path
    /// changes (which may move a buffer into or out of a repository).
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch each new buffer so its diff state can pick up syntax
                // highlighting when the buffer's language changes.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop all diff state (local and shared) for a dead buffer.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Reload the committed text for the new path and feed
                        // it into the diff state; failures are only logged.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1755
1756 pub fn recalculate_buffer_diffs(
1757 &mut self,
1758 buffers: Vec<Entity<Buffer>>,
1759 cx: &mut Context<Self>,
1760 ) -> impl Future<Output = ()> + use<> {
1761 let mut futures = Vec::new();
1762 for buffer in buffers {
1763 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1764 let buffer = buffer.read(cx).text_snapshot();
1765 diff_state.update(cx, |diff_state, cx| {
1766 diff_state.recalculate_diffs(buffer.clone(), cx);
1767 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1768 });
1769 futures.push(diff_state.update(cx, |diff_state, cx| {
1770 diff_state
1771 .reparse_conflict_markers(buffer, cx)
1772 .map(|_| {})
1773 .boxed()
1774 }));
1775 }
1776 }
1777 async move {
1778 futures::future::join_all(futures).await;
1779 }
1780 }
1781
    /// Reacts to hunks being staged or unstaged in a buffer diff by writing
    /// the new index text back to the repository; on failure, pending hunks
    /// are cleared and an `IndexWriteError` is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the operation counter so stale index writes can be
                // detected and ignored by the repository job.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Only the error case needs handling: roll back the
                        // optimistic pending hunks and surface the error.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1821
1822 fn local_worktree_git_repos_changed(
1823 &mut self,
1824 worktree: Entity<Worktree>,
1825 changed_repos: &UpdatedGitRepositoriesSet,
1826 cx: &mut Context<Self>,
1827 ) {
1828 log::debug!("local worktree repos changed");
1829 debug_assert!(worktree.read(cx).is_local());
1830
1831 for repository in self.repositories.values() {
1832 repository.update(cx, |repository, cx| {
1833 let repo_abs_path = &repository.work_directory_abs_path;
1834 if changed_repos.iter().any(|update| {
1835 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1836 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1837 }) {
1838 repository.reload_buffer_diff_bases(cx);
1839 }
1840 });
1841 }
1842 }
1843
    /// All repositories known to this store, keyed by their identifier.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1847
1848 /// Returns the original (main) repository working directory for the given worktree.
1849 /// For normal checkouts this equals the worktree's own path; for linked
1850 /// worktrees it points back to the original repo.
1851 pub fn original_repo_path_for_worktree(
1852 &self,
1853 worktree_id: WorktreeId,
1854 cx: &App,
1855 ) -> Option<Arc<Path>> {
1856 self.active_repo_id
1857 .iter()
1858 .chain(self.worktree_ids.keys())
1859 .find(|repo_id| {
1860 self.worktree_ids
1861 .get(repo_id)
1862 .is_some_and(|ids| ids.contains(&worktree_id))
1863 })
1864 .and_then(|repo_id| self.repositories.get(repo_id))
1865 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1866 }
1867
1868 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1869 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1870 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1871 Some(status.status)
1872 }
1873
1874 pub fn repository_and_path_for_buffer_id(
1875 &self,
1876 buffer_id: BufferId,
1877 cx: &App,
1878 ) -> Option<(Entity<Repository>, RepoPath)> {
1879 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1880 let project_path = buffer.read(cx).project_path(cx)?;
1881 self.repository_and_path_for_project_path(&project_path, cx)
1882 }
1883
1884 pub fn repository_and_path_for_project_path(
1885 &self,
1886 path: &ProjectPath,
1887 cx: &App,
1888 ) -> Option<(Entity<Repository>, RepoPath)> {
1889 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1890 self.repositories
1891 .values()
1892 .filter_map(|repo| {
1893 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1894 Some((repo.clone(), repo_path))
1895 })
1896 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1897 }
1898
1899 pub fn git_init(
1900 &self,
1901 path: Arc<Path>,
1902 fallback_branch_name: String,
1903 cx: &App,
1904 ) -> Task<Result<()>> {
1905 match &self.state {
1906 GitStoreState::Local { fs, .. } => {
1907 let fs = fs.clone();
1908 cx.background_executor()
1909 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1910 }
1911 GitStoreState::Remote {
1912 upstream_client,
1913 upstream_project_id: project_id,
1914 ..
1915 } => {
1916 let client = upstream_client.clone();
1917 let project_id = *project_id;
1918 cx.background_executor().spawn(async move {
1919 client
1920 .request(proto::GitInit {
1921 project_id: project_id,
1922 abs_path: path.to_string_lossy().into_owned(),
1923 fallback_branch_name,
1924 })
1925 .await?;
1926 Ok(())
1927 })
1928 }
1929 }
1930 }
1931
1932 pub fn git_clone(
1933 &self,
1934 repo: String,
1935 path: impl Into<Arc<std::path::Path>>,
1936 cx: &App,
1937 ) -> Task<Result<()>> {
1938 let path = path.into();
1939 match &self.state {
1940 GitStoreState::Local { fs, .. } => {
1941 let fs = fs.clone();
1942 cx.background_executor()
1943 .spawn(async move { fs.git_clone(&repo, &path).await })
1944 }
1945 GitStoreState::Remote {
1946 upstream_client,
1947 upstream_project_id,
1948 ..
1949 } => {
1950 if upstream_client.is_via_collab() {
1951 return Task::ready(Err(anyhow!(
1952 "Git Clone isn't supported for project guests"
1953 )));
1954 }
1955 let request = upstream_client.request(proto::GitClone {
1956 project_id: *upstream_project_id,
1957 abs_path: path.to_string_lossy().into_owned(),
1958 remote_repo: repo,
1959 });
1960
1961 cx.background_spawn(async move {
1962 let result = request.await?;
1963
1964 match result.success {
1965 true => Ok(()),
1966 false => Err(anyhow!("Git Clone failed")),
1967 }
1968 })
1969 }
1970 }
1971 }
1972
    /// Handles an `UpdateRepository` message from the upstream host: creates
    /// or updates the local mirror of the repository, makes it the active
    /// repository if none is set, and re-forwards the update downstream.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Create the remote repository mirror on first sight of this id.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // First repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Relay the update to any further-downstream collaborator,
            // rewriting the project id to ours.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
2028
    /// Handles a `RemoveRepository` message from the upstream host: drops the
    /// local mirror, relays the removal downstream, and clears the active
    /// repository if it was the one removed.
    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        // NOTE(review): the result of `update` is discarded — if `this` has
        // been released the removal becomes a no-op; confirm that's intended.
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            // Relay the removal downstream, rewriting the project id to ours.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        });
        Ok(())
    }
2050
2051 async fn handle_git_init(
2052 this: Entity<Self>,
2053 envelope: TypedEnvelope<proto::GitInit>,
2054 cx: AsyncApp,
2055 ) -> Result<proto::Ack> {
2056 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2057 let name = envelope.payload.fallback_branch_name;
2058 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2059 .await?;
2060
2061 Ok(proto::Ack {})
2062 }
2063
2064 async fn handle_git_clone(
2065 this: Entity<Self>,
2066 envelope: TypedEnvelope<proto::GitClone>,
2067 cx: AsyncApp,
2068 ) -> Result<proto::GitCloneResponse> {
2069 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2070 let repo_name = envelope.payload.remote_repo;
2071 let result = cx
2072 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2073 .await;
2074
2075 Ok(proto::GitCloneResponse {
2076 success: result.is_ok(),
2077 })
2078 }
2079
2080 async fn handle_fetch(
2081 this: Entity<Self>,
2082 envelope: TypedEnvelope<proto::Fetch>,
2083 mut cx: AsyncApp,
2084 ) -> Result<proto::RemoteMessageResponse> {
2085 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2086 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2087 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2088 let askpass_id = envelope.payload.askpass_id;
2089
2090 let askpass = make_remote_delegate(
2091 this,
2092 envelope.payload.project_id,
2093 repository_id,
2094 askpass_id,
2095 &mut cx,
2096 );
2097
2098 let remote_output = repository_handle
2099 .update(&mut cx, |repository_handle, cx| {
2100 repository_handle.fetch(fetch_options, askpass, cx)
2101 })
2102 .await??;
2103
2104 Ok(proto::RemoteMessageResponse {
2105 stdout: remote_output.stdout,
2106 stderr: remote_output.stderr,
2107 })
2108 }
2109
2110 async fn handle_push(
2111 this: Entity<Self>,
2112 envelope: TypedEnvelope<proto::Push>,
2113 mut cx: AsyncApp,
2114 ) -> Result<proto::RemoteMessageResponse> {
2115 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2116 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2117
2118 let askpass_id = envelope.payload.askpass_id;
2119 let askpass = make_remote_delegate(
2120 this,
2121 envelope.payload.project_id,
2122 repository_id,
2123 askpass_id,
2124 &mut cx,
2125 );
2126
2127 let options = envelope
2128 .payload
2129 .options
2130 .as_ref()
2131 .map(|_| match envelope.payload.options() {
2132 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
2133 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
2134 });
2135
2136 let branch_name = envelope.payload.branch_name.into();
2137 let remote_branch_name = envelope.payload.remote_branch_name.into();
2138 let remote_name = envelope.payload.remote_name.into();
2139
2140 let remote_output = repository_handle
2141 .update(&mut cx, |repository_handle, cx| {
2142 repository_handle.push(
2143 branch_name,
2144 remote_branch_name,
2145 remote_name,
2146 options,
2147 askpass,
2148 cx,
2149 )
2150 })
2151 .await??;
2152 Ok(proto::RemoteMessageResponse {
2153 stdout: remote_output.stdout,
2154 stderr: remote_output.stderr,
2155 })
2156 }
2157
2158 async fn handle_pull(
2159 this: Entity<Self>,
2160 envelope: TypedEnvelope<proto::Pull>,
2161 mut cx: AsyncApp,
2162 ) -> Result<proto::RemoteMessageResponse> {
2163 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2164 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2165 let askpass_id = envelope.payload.askpass_id;
2166 let askpass = make_remote_delegate(
2167 this,
2168 envelope.payload.project_id,
2169 repository_id,
2170 askpass_id,
2171 &mut cx,
2172 );
2173
2174 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2175 let remote_name = envelope.payload.remote_name.into();
2176 let rebase = envelope.payload.rebase;
2177
2178 let remote_message = repository_handle
2179 .update(&mut cx, |repository_handle, cx| {
2180 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2181 })
2182 .await??;
2183
2184 Ok(proto::RemoteMessageResponse {
2185 stdout: remote_message.stdout,
2186 stderr: remote_message.stderr,
2187 })
2188 }
2189
2190 async fn handle_stage(
2191 this: Entity<Self>,
2192 envelope: TypedEnvelope<proto::Stage>,
2193 mut cx: AsyncApp,
2194 ) -> Result<proto::Ack> {
2195 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2196 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2197
2198 let entries = envelope
2199 .payload
2200 .paths
2201 .into_iter()
2202 .map(|path| RepoPath::new(&path))
2203 .collect::<Result<Vec<_>>>()?;
2204
2205 repository_handle
2206 .update(&mut cx, |repository_handle, cx| {
2207 repository_handle.stage_entries(entries, cx)
2208 })
2209 .await?;
2210 Ok(proto::Ack {})
2211 }
2212
2213 async fn handle_unstage(
2214 this: Entity<Self>,
2215 envelope: TypedEnvelope<proto::Unstage>,
2216 mut cx: AsyncApp,
2217 ) -> Result<proto::Ack> {
2218 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2219 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2220
2221 let entries = envelope
2222 .payload
2223 .paths
2224 .into_iter()
2225 .map(|path| RepoPath::new(&path))
2226 .collect::<Result<Vec<_>>>()?;
2227
2228 repository_handle
2229 .update(&mut cx, |repository_handle, cx| {
2230 repository_handle.unstage_entries(entries, cx)
2231 })
2232 .await?;
2233
2234 Ok(proto::Ack {})
2235 }
2236
2237 async fn handle_stash(
2238 this: Entity<Self>,
2239 envelope: TypedEnvelope<proto::Stash>,
2240 mut cx: AsyncApp,
2241 ) -> Result<proto::Ack> {
2242 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2243 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2244
2245 let entries = envelope
2246 .payload
2247 .paths
2248 .into_iter()
2249 .map(|path| RepoPath::new(&path))
2250 .collect::<Result<Vec<_>>>()?;
2251
2252 repository_handle
2253 .update(&mut cx, |repository_handle, cx| {
2254 repository_handle.stash_entries(entries, cx)
2255 })
2256 .await?;
2257
2258 Ok(proto::Ack {})
2259 }
2260
2261 async fn handle_stash_pop(
2262 this: Entity<Self>,
2263 envelope: TypedEnvelope<proto::StashPop>,
2264 mut cx: AsyncApp,
2265 ) -> Result<proto::Ack> {
2266 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2267 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2268 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2269
2270 repository_handle
2271 .update(&mut cx, |repository_handle, cx| {
2272 repository_handle.stash_pop(stash_index, cx)
2273 })
2274 .await?;
2275
2276 Ok(proto::Ack {})
2277 }
2278
2279 async fn handle_stash_apply(
2280 this: Entity<Self>,
2281 envelope: TypedEnvelope<proto::StashApply>,
2282 mut cx: AsyncApp,
2283 ) -> Result<proto::Ack> {
2284 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2285 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2286 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2287
2288 repository_handle
2289 .update(&mut cx, |repository_handle, cx| {
2290 repository_handle.stash_apply(stash_index, cx)
2291 })
2292 .await?;
2293
2294 Ok(proto::Ack {})
2295 }
2296
2297 async fn handle_stash_drop(
2298 this: Entity<Self>,
2299 envelope: TypedEnvelope<proto::StashDrop>,
2300 mut cx: AsyncApp,
2301 ) -> Result<proto::Ack> {
2302 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2303 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2304 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2305
2306 repository_handle
2307 .update(&mut cx, |repository_handle, cx| {
2308 repository_handle.stash_drop(stash_index, cx)
2309 })
2310 .await??;
2311
2312 Ok(proto::Ack {})
2313 }
2314
2315 async fn handle_set_index_text(
2316 this: Entity<Self>,
2317 envelope: TypedEnvelope<proto::SetIndexText>,
2318 mut cx: AsyncApp,
2319 ) -> Result<proto::Ack> {
2320 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2321 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2322 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2323
2324 repository_handle
2325 .update(&mut cx, |repository_handle, cx| {
2326 repository_handle.spawn_set_index_text_job(
2327 repo_path,
2328 envelope.payload.text,
2329 None,
2330 cx,
2331 )
2332 })
2333 .await??;
2334 Ok(proto::Ack {})
2335 }
2336
2337 async fn handle_run_hook(
2338 this: Entity<Self>,
2339 envelope: TypedEnvelope<proto::RunGitHook>,
2340 mut cx: AsyncApp,
2341 ) -> Result<proto::Ack> {
2342 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2343 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2344 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2345 repository_handle
2346 .update(&mut cx, |repository_handle, cx| {
2347 repository_handle.run_hook(hook, cx)
2348 })
2349 .await??;
2350 Ok(proto::Ack {})
2351 }
2352
2353 async fn handle_commit(
2354 this: Entity<Self>,
2355 envelope: TypedEnvelope<proto::Commit>,
2356 mut cx: AsyncApp,
2357 ) -> Result<proto::Ack> {
2358 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2359 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2360 let askpass_id = envelope.payload.askpass_id;
2361
2362 let askpass = make_remote_delegate(
2363 this,
2364 envelope.payload.project_id,
2365 repository_id,
2366 askpass_id,
2367 &mut cx,
2368 );
2369
2370 let message = SharedString::from(envelope.payload.message);
2371 let name = envelope.payload.name.map(SharedString::from);
2372 let email = envelope.payload.email.map(SharedString::from);
2373 let options = envelope.payload.options.unwrap_or_default();
2374
2375 repository_handle
2376 .update(&mut cx, |repository_handle, cx| {
2377 repository_handle.commit(
2378 message,
2379 name.zip(email),
2380 CommitOptions {
2381 amend: options.amend,
2382 signoff: options.signoff,
2383 allow_empty: options.allow_empty,
2384 },
2385 askpass,
2386 cx,
2387 )
2388 })
2389 .await??;
2390 Ok(proto::Ack {})
2391 }
2392
2393 async fn handle_get_remotes(
2394 this: Entity<Self>,
2395 envelope: TypedEnvelope<proto::GetRemotes>,
2396 mut cx: AsyncApp,
2397 ) -> Result<proto::GetRemotesResponse> {
2398 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2399 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2400
2401 let branch_name = envelope.payload.branch_name;
2402 let is_push = envelope.payload.is_push;
2403
2404 let remotes = repository_handle
2405 .update(&mut cx, |repository_handle, _| {
2406 repository_handle.get_remotes(branch_name, is_push)
2407 })
2408 .await??;
2409
2410 Ok(proto::GetRemotesResponse {
2411 remotes: remotes
2412 .into_iter()
2413 .map(|remotes| proto::get_remotes_response::Remote {
2414 name: remotes.name.to_string(),
2415 })
2416 .collect::<Vec<_>>(),
2417 })
2418 }
2419
2420 async fn handle_get_worktrees(
2421 this: Entity<Self>,
2422 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2423 mut cx: AsyncApp,
2424 ) -> Result<proto::GitWorktreesResponse> {
2425 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2426 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2427
2428 let worktrees = repository_handle
2429 .update(&mut cx, |repository_handle, _| {
2430 repository_handle.worktrees()
2431 })
2432 .await??;
2433
2434 Ok(proto::GitWorktreesResponse {
2435 worktrees: worktrees
2436 .into_iter()
2437 .map(|worktree| worktree_to_proto(&worktree))
2438 .collect::<Vec<_>>(),
2439 })
2440 }
2441
2442 async fn handle_create_worktree(
2443 this: Entity<Self>,
2444 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2445 mut cx: AsyncApp,
2446 ) -> Result<proto::Ack> {
2447 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2448 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2449 let directory = PathBuf::from(envelope.payload.directory);
2450 let name = envelope.payload.name;
2451 let commit = envelope.payload.commit;
2452 let use_existing_branch = envelope.payload.use_existing_branch;
2453 let target = if name.is_empty() {
2454 CreateWorktreeTarget::Detached { base_sha: commit }
2455 } else if use_existing_branch {
2456 CreateWorktreeTarget::ExistingBranch { branch_name: name }
2457 } else {
2458 CreateWorktreeTarget::NewBranch {
2459 branch_name: name,
2460 base_sha: commit,
2461 }
2462 };
2463
2464 repository_handle
2465 .update(&mut cx, |repository_handle, _| {
2466 repository_handle.create_worktree(target, directory)
2467 })
2468 .await??;
2469
2470 Ok(proto::Ack {})
2471 }
2472
2473 async fn handle_remove_worktree(
2474 this: Entity<Self>,
2475 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2476 mut cx: AsyncApp,
2477 ) -> Result<proto::Ack> {
2478 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2479 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2480 let path = PathBuf::from(envelope.payload.path);
2481 let force = envelope.payload.force;
2482
2483 repository_handle
2484 .update(&mut cx, |repository_handle, _| {
2485 repository_handle.remove_worktree(path, force)
2486 })
2487 .await??;
2488
2489 Ok(proto::Ack {})
2490 }
2491
2492 async fn handle_rename_worktree(
2493 this: Entity<Self>,
2494 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2495 mut cx: AsyncApp,
2496 ) -> Result<proto::Ack> {
2497 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2498 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2499 let old_path = PathBuf::from(envelope.payload.old_path);
2500 let new_path = PathBuf::from(envelope.payload.new_path);
2501
2502 repository_handle
2503 .update(&mut cx, |repository_handle, _| {
2504 repository_handle.rename_worktree(old_path, new_path)
2505 })
2506 .await??;
2507
2508 Ok(proto::Ack {})
2509 }
2510
2511 async fn handle_get_head_sha(
2512 this: Entity<Self>,
2513 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2514 mut cx: AsyncApp,
2515 ) -> Result<proto::GitGetHeadShaResponse> {
2516 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2517 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2518
2519 let head_sha = repository_handle
2520 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2521 .await??;
2522
2523 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2524 }
2525
2526 async fn handle_edit_ref(
2527 this: Entity<Self>,
2528 envelope: TypedEnvelope<proto::GitEditRef>,
2529 mut cx: AsyncApp,
2530 ) -> Result<proto::Ack> {
2531 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2532 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2533 let ref_name = envelope.payload.ref_name;
2534 let commit = match envelope.payload.action {
2535 Some(proto::git_edit_ref::Action::UpdateToCommit(sha)) => Some(sha),
2536 Some(proto::git_edit_ref::Action::Delete(_)) => None,
2537 None => anyhow::bail!("GitEditRef missing action"),
2538 };
2539
2540 repository_handle
2541 .update(&mut cx, |repository_handle, _| {
2542 repository_handle.edit_ref(ref_name, commit)
2543 })
2544 .await??;
2545
2546 Ok(proto::Ack {})
2547 }
2548
2549 async fn handle_repair_worktrees(
2550 this: Entity<Self>,
2551 envelope: TypedEnvelope<proto::GitRepairWorktrees>,
2552 mut cx: AsyncApp,
2553 ) -> Result<proto::Ack> {
2554 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2555 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2556
2557 repository_handle
2558 .update(&mut cx, |repository_handle, _| {
2559 repository_handle.repair_worktrees()
2560 })
2561 .await??;
2562
2563 Ok(proto::Ack {})
2564 }
2565
2566 async fn handle_get_branches(
2567 this: Entity<Self>,
2568 envelope: TypedEnvelope<proto::GitGetBranches>,
2569 mut cx: AsyncApp,
2570 ) -> Result<proto::GitBranchesResponse> {
2571 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2572 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2573
2574 let branches = repository_handle
2575 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2576 .await??;
2577
2578 Ok(proto::GitBranchesResponse {
2579 branches: branches
2580 .into_iter()
2581 .map(|branch| branch_to_proto(&branch))
2582 .collect::<Vec<_>>(),
2583 })
2584 }
2585 async fn handle_get_default_branch(
2586 this: Entity<Self>,
2587 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2588 mut cx: AsyncApp,
2589 ) -> Result<proto::GetDefaultBranchResponse> {
2590 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2591 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2592
2593 let branch = repository_handle
2594 .update(&mut cx, |repository_handle, _| {
2595 repository_handle.default_branch(false)
2596 })
2597 .await??
2598 .map(Into::into);
2599
2600 Ok(proto::GetDefaultBranchResponse { branch })
2601 }
2602 async fn handle_create_branch(
2603 this: Entity<Self>,
2604 envelope: TypedEnvelope<proto::GitCreateBranch>,
2605 mut cx: AsyncApp,
2606 ) -> Result<proto::Ack> {
2607 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2608 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2609 let branch_name = envelope.payload.branch_name;
2610
2611 repository_handle
2612 .update(&mut cx, |repository_handle, _| {
2613 repository_handle.create_branch(branch_name, None)
2614 })
2615 .await??;
2616
2617 Ok(proto::Ack {})
2618 }
2619
2620 async fn handle_change_branch(
2621 this: Entity<Self>,
2622 envelope: TypedEnvelope<proto::GitChangeBranch>,
2623 mut cx: AsyncApp,
2624 ) -> Result<proto::Ack> {
2625 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2626 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2627 let branch_name = envelope.payload.branch_name;
2628
2629 repository_handle
2630 .update(&mut cx, |repository_handle, _| {
2631 repository_handle.change_branch(branch_name)
2632 })
2633 .await??;
2634
2635 Ok(proto::Ack {})
2636 }
2637
2638 async fn handle_rename_branch(
2639 this: Entity<Self>,
2640 envelope: TypedEnvelope<proto::GitRenameBranch>,
2641 mut cx: AsyncApp,
2642 ) -> Result<proto::Ack> {
2643 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2644 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2645 let branch = envelope.payload.branch;
2646 let new_name = envelope.payload.new_name;
2647
2648 repository_handle
2649 .update(&mut cx, |repository_handle, _| {
2650 repository_handle.rename_branch(branch, new_name)
2651 })
2652 .await??;
2653
2654 Ok(proto::Ack {})
2655 }
2656
2657 async fn handle_create_remote(
2658 this: Entity<Self>,
2659 envelope: TypedEnvelope<proto::GitCreateRemote>,
2660 mut cx: AsyncApp,
2661 ) -> Result<proto::Ack> {
2662 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2663 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2664 let remote_name = envelope.payload.remote_name;
2665 let remote_url = envelope.payload.remote_url;
2666
2667 repository_handle
2668 .update(&mut cx, |repository_handle, _| {
2669 repository_handle.create_remote(remote_name, remote_url)
2670 })
2671 .await??;
2672
2673 Ok(proto::Ack {})
2674 }
2675
2676 async fn handle_delete_branch(
2677 this: Entity<Self>,
2678 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2679 mut cx: AsyncApp,
2680 ) -> Result<proto::Ack> {
2681 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2682 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2683 let is_remote = envelope.payload.is_remote;
2684 let branch_name = envelope.payload.branch_name;
2685
2686 repository_handle
2687 .update(&mut cx, |repository_handle, _| {
2688 repository_handle.delete_branch(is_remote, branch_name)
2689 })
2690 .await??;
2691
2692 Ok(proto::Ack {})
2693 }
2694
2695 async fn handle_remove_remote(
2696 this: Entity<Self>,
2697 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2698 mut cx: AsyncApp,
2699 ) -> Result<proto::Ack> {
2700 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2701 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2702 let remote_name = envelope.payload.remote_name;
2703
2704 repository_handle
2705 .update(&mut cx, |repository_handle, _| {
2706 repository_handle.remove_remote(remote_name)
2707 })
2708 .await??;
2709
2710 Ok(proto::Ack {})
2711 }
2712
2713 async fn handle_show(
2714 this: Entity<Self>,
2715 envelope: TypedEnvelope<proto::GitShow>,
2716 mut cx: AsyncApp,
2717 ) -> Result<proto::GitCommitDetails> {
2718 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2719 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2720
2721 let commit = repository_handle
2722 .update(&mut cx, |repository_handle, _| {
2723 repository_handle.show(envelope.payload.commit)
2724 })
2725 .await??;
2726 Ok(proto::GitCommitDetails {
2727 sha: commit.sha.into(),
2728 message: commit.message.into(),
2729 commit_timestamp: commit.commit_timestamp,
2730 author_email: commit.author_email.into(),
2731 author_name: commit.author_name.into(),
2732 })
2733 }
2734
2735 async fn handle_create_checkpoint(
2736 this: Entity<Self>,
2737 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2738 mut cx: AsyncApp,
2739 ) -> Result<proto::GitCreateCheckpointResponse> {
2740 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2741 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2742
2743 let checkpoint = repository_handle
2744 .update(&mut cx, |repository, _| repository.checkpoint())
2745 .await??;
2746
2747 Ok(proto::GitCreateCheckpointResponse {
2748 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2749 })
2750 }
2751
2752 async fn handle_create_archive_checkpoint(
2753 this: Entity<Self>,
2754 envelope: TypedEnvelope<proto::GitCreateArchiveCheckpoint>,
2755 mut cx: AsyncApp,
2756 ) -> Result<proto::GitCreateArchiveCheckpointResponse> {
2757 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2758 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2759
2760 let (staged_commit_sha, unstaged_commit_sha) = repository_handle
2761 .update(&mut cx, |repository, _| {
2762 repository.create_archive_checkpoint()
2763 })
2764 .await??;
2765
2766 Ok(proto::GitCreateArchiveCheckpointResponse {
2767 staged_commit_sha,
2768 unstaged_commit_sha,
2769 })
2770 }
2771
2772 async fn handle_restore_checkpoint(
2773 this: Entity<Self>,
2774 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2775 mut cx: AsyncApp,
2776 ) -> Result<proto::Ack> {
2777 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2778 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2779
2780 let checkpoint = GitRepositoryCheckpoint {
2781 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2782 };
2783
2784 repository_handle
2785 .update(&mut cx, |repository, _| {
2786 repository.restore_checkpoint(checkpoint)
2787 })
2788 .await??;
2789
2790 Ok(proto::Ack {})
2791 }
2792
2793 async fn handle_restore_archive_checkpoint(
2794 this: Entity<Self>,
2795 envelope: TypedEnvelope<proto::GitRestoreArchiveCheckpoint>,
2796 mut cx: AsyncApp,
2797 ) -> Result<proto::Ack> {
2798 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2799 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2800 let staged_commit_sha = envelope.payload.staged_commit_sha;
2801 let unstaged_commit_sha = envelope.payload.unstaged_commit_sha;
2802
2803 repository_handle
2804 .update(&mut cx, |repository, _| {
2805 repository.restore_archive_checkpoint(staged_commit_sha, unstaged_commit_sha)
2806 })
2807 .await??;
2808
2809 Ok(proto::Ack {})
2810 }
2811
2812 async fn handle_compare_checkpoints(
2813 this: Entity<Self>,
2814 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2815 mut cx: AsyncApp,
2816 ) -> Result<proto::GitCompareCheckpointsResponse> {
2817 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2818 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2819
2820 let left = GitRepositoryCheckpoint {
2821 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2822 };
2823 let right = GitRepositoryCheckpoint {
2824 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2825 };
2826
2827 let equal = repository_handle
2828 .update(&mut cx, |repository, _| {
2829 repository.compare_checkpoints(left, right)
2830 })
2831 .await??;
2832
2833 Ok(proto::GitCompareCheckpointsResponse { equal })
2834 }
2835
2836 async fn handle_diff_checkpoints(
2837 this: Entity<Self>,
2838 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2839 mut cx: AsyncApp,
2840 ) -> Result<proto::GitDiffCheckpointsResponse> {
2841 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2842 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2843
2844 let base = GitRepositoryCheckpoint {
2845 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2846 };
2847 let target = GitRepositoryCheckpoint {
2848 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2849 };
2850
2851 let diff = repository_handle
2852 .update(&mut cx, |repository, _| {
2853 repository.diff_checkpoints(base, target)
2854 })
2855 .await??;
2856
2857 Ok(proto::GitDiffCheckpointsResponse { diff })
2858 }
2859
2860 async fn handle_load_commit_diff(
2861 this: Entity<Self>,
2862 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2863 mut cx: AsyncApp,
2864 ) -> Result<proto::LoadCommitDiffResponse> {
2865 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2866 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2867
2868 let commit_diff = repository_handle
2869 .update(&mut cx, |repository_handle, _| {
2870 repository_handle.load_commit_diff(envelope.payload.commit)
2871 })
2872 .await??;
2873 Ok(proto::LoadCommitDiffResponse {
2874 files: commit_diff
2875 .files
2876 .into_iter()
2877 .map(|file| proto::CommitFile {
2878 path: file.path.to_proto(),
2879 old_text: file.old_text,
2880 new_text: file.new_text,
2881 is_binary: file.is_binary,
2882 })
2883 .collect(),
2884 })
2885 }
2886
2887 async fn handle_file_history(
2888 this: Entity<Self>,
2889 envelope: TypedEnvelope<proto::GitFileHistory>,
2890 mut cx: AsyncApp,
2891 ) -> Result<proto::GitFileHistoryResponse> {
2892 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2893 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2894 let path = RepoPath::from_proto(&envelope.payload.path)?;
2895 let skip = envelope.payload.skip as usize;
2896 let limit = envelope.payload.limit.map(|l| l as usize);
2897
2898 let file_history = repository_handle
2899 .update(&mut cx, |repository_handle, _| {
2900 repository_handle.file_history_paginated(path, skip, limit)
2901 })
2902 .await??;
2903
2904 Ok(proto::GitFileHistoryResponse {
2905 entries: file_history
2906 .entries
2907 .into_iter()
2908 .map(|entry| proto::FileHistoryEntry {
2909 sha: entry.sha.to_string(),
2910 subject: entry.subject.to_string(),
2911 message: entry.message.to_string(),
2912 commit_timestamp: entry.commit_timestamp,
2913 author_name: entry.author_name.to_string(),
2914 author_email: entry.author_email.to_string(),
2915 })
2916 .collect(),
2917 path: file_history.path.to_proto(),
2918 })
2919 }
2920
2921 async fn handle_reset(
2922 this: Entity<Self>,
2923 envelope: TypedEnvelope<proto::GitReset>,
2924 mut cx: AsyncApp,
2925 ) -> Result<proto::Ack> {
2926 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2927 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2928
2929 let mode = match envelope.payload.mode() {
2930 git_reset::ResetMode::Soft => ResetMode::Soft,
2931 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2932 };
2933
2934 repository_handle
2935 .update(&mut cx, |repository_handle, cx| {
2936 repository_handle.reset(envelope.payload.commit, mode, cx)
2937 })
2938 .await??;
2939 Ok(proto::Ack {})
2940 }
2941
2942 async fn handle_checkout_files(
2943 this: Entity<Self>,
2944 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2945 mut cx: AsyncApp,
2946 ) -> Result<proto::Ack> {
2947 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2948 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2949 let paths = envelope
2950 .payload
2951 .paths
2952 .iter()
2953 .map(|s| RepoPath::from_proto(s))
2954 .collect::<Result<Vec<_>>>()?;
2955
2956 repository_handle
2957 .update(&mut cx, |repository_handle, cx| {
2958 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2959 })
2960 .await?;
2961 Ok(proto::Ack {})
2962 }
2963
    /// Opens the repository's commit-message buffer and replicates it to the
    /// requesting peer, returning the buffer's remote id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                // Replicate to whichever peer originated the request,
                // falling back to the direct sender.
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2994
    /// Relays an askpass credential prompt to the delegate registered under
    /// the request's askpass id, returning the decrypted response.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the shared map so no lock is held while
        // awaiting the user's response below.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        // NOTE(review): if the prompt is cancelled we return before the
        // re-insert below, leaving the delegate removed — confirm intended.
        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so later prompts for the same id still work.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
3023
3024 async fn handle_check_for_pushed_commits(
3025 this: Entity<Self>,
3026 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
3027 mut cx: AsyncApp,
3028 ) -> Result<proto::CheckForPushedCommitsResponse> {
3029 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
3030 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
3031
3032 let branches = repository_handle
3033 .update(&mut cx, |repository_handle, _| {
3034 repository_handle.check_for_pushed_commits()
3035 })
3036 .await??;
3037 Ok(proto::CheckForPushedCommitsResponse {
3038 pushed_to: branches
3039 .into_iter()
3040 .map(|commit| commit.to_string())
3041 .collect(),
3042 })
3043 }
3044
3045 async fn handle_git_diff(
3046 this: Entity<Self>,
3047 envelope: TypedEnvelope<proto::GitDiff>,
3048 mut cx: AsyncApp,
3049 ) -> Result<proto::GitDiffResponse> {
3050 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
3051 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
3052 let diff_type = match envelope.payload.diff_type() {
3053 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
3054 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
3055 proto::git_diff::DiffType::MergeBase => {
3056 let base_ref = envelope
3057 .payload
3058 .merge_base_ref
3059 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
3060 DiffType::MergeBase {
3061 base_ref: base_ref.into(),
3062 }
3063 }
3064 };
3065
3066 let mut diff = repository_handle
3067 .update(&mut cx, |repository_handle, cx| {
3068 repository_handle.diff(diff_type, cx)
3069 })
3070 .await??;
3071 const ONE_MB: usize = 1_000_000;
3072 if diff.len() > ONE_MB {
3073 diff = diff.chars().take(ONE_MB).collect()
3074 }
3075
3076 Ok(proto::GitDiffResponse { diff })
3077 }
3078
3079 async fn handle_tree_diff(
3080 this: Entity<Self>,
3081 request: TypedEnvelope<proto::GetTreeDiff>,
3082 mut cx: AsyncApp,
3083 ) -> Result<proto::GetTreeDiffResponse> {
3084 let repository_id = RepositoryId(request.payload.repository_id);
3085 let diff_type = if request.payload.is_merge {
3086 DiffTreeType::MergeBase {
3087 base: request.payload.base.into(),
3088 head: request.payload.head.into(),
3089 }
3090 } else {
3091 DiffTreeType::Since {
3092 base: request.payload.base.into(),
3093 head: request.payload.head.into(),
3094 }
3095 };
3096
3097 let diff = this
3098 .update(&mut cx, |this, cx| {
3099 let repository = this.repositories().get(&repository_id)?;
3100 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
3101 })
3102 .context("missing repository")?
3103 .await??;
3104
3105 Ok(proto::GetTreeDiffResponse {
3106 entries: diff
3107 .entries
3108 .into_iter()
3109 .map(|(path, status)| proto::TreeDiffStatus {
3110 path: path.as_ref().to_proto(),
3111 status: match status {
3112 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
3113 TreeDiffStatus::Modified { .. } => {
3114 proto::tree_diff_status::Status::Modified.into()
3115 }
3116 TreeDiffStatus::Deleted { .. } => {
3117 proto::tree_diff_status::Status::Deleted.into()
3118 }
3119 },
3120 oid: match status {
3121 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
3122 Some(old.to_string())
3123 }
3124 TreeDiffStatus::Added => None,
3125 },
3126 })
3127 .collect(),
3128 })
3129 }
3130
3131 async fn handle_get_blob_content(
3132 this: Entity<Self>,
3133 request: TypedEnvelope<proto::GetBlobContent>,
3134 mut cx: AsyncApp,
3135 ) -> Result<proto::GetBlobContentResponse> {
3136 let oid = git::Oid::from_str(&request.payload.oid)?;
3137 let repository_id = RepositoryId(request.payload.repository_id);
3138 let content = this
3139 .update(&mut cx, |this, cx| {
3140 let repository = this.repositories().get(&repository_id)?;
3141 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
3142 })
3143 .context("missing repository")?
3144 .await?;
3145 Ok(proto::GetBlobContentResponse { content })
3146 }
3147
    /// Remote-procedure handler: opens (or reuses) the unstaged diff for a
    /// buffer, retains it on behalf of the requesting peer, and replies with
    /// the index ("staged") base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        this.update(&mut cx, |this, _| {
            // Keep a strong handle keyed by the peer that asked for the diff,
            // so it is not dropped while the collaborator still uses it.
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
3171
    /// Remote-procedure handler: opens (or reuses) the uncommitted diff for a
    /// buffer, retains it on behalf of the requesting peer, and replies with
    /// the committed (HEAD) and staged (index) base texts plus a `Mode`
    /// telling the client which of the two it must track.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        this.update(&mut cx, |this, _| {
            // Keep a strong handle keyed by the peer that asked for the diff,
            // so it is not dropped while the collaborator still uses it.
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff of an uncommitted diff is the unstaged diff;
            // its base text is the index contents (when present).
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Index and HEAD share the same base buffer, so the
                        // staged text need not be sent separately.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // Present in HEAD but absent from the index.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No HEAD version of the file (e.g. a newly added file).
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3232
    /// Remote-procedure handler: applies new diff base texts (HEAD and/or
    /// index) pushed by the host to the local git state of a buffer.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            // If the buffer or its git state is already gone, the update is
            // silently dropped — there is nothing left to refresh.
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
3251
    /// Remote-procedure handler: computes git blame for a buffer at the
    /// version the requesting client was looking at.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Wait until all edits up to the requested version have been applied
        // locally, so blame line numbers line up with what the client sees.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3274
    /// Remote-procedure handler: builds a permalink URL to the selected line
    /// range of a buffer on its git hosting provider.
    async fn handle_get_permalink_to_line(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPermalinkToLine>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetPermalinkToLineResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let selection = {
            let proto_selection = envelope
                .payload
                .selection
                .context("no selection to get permalink for defined")?;
            // Narrowed to u32 for `get_permalink_to_line`; presumably these
            // endpoints always fit — TODO confirm against the proto schema.
            proto_selection.start as u32..proto_selection.end as u32
        };
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        let permalink = this
            .update(&mut cx, |this, cx| {
                this.get_permalink_to_line(&buffer, selection, cx)
            })
            .await?;
        Ok(proto::GetPermalinkToLineResponse {
            permalink: permalink.to_string(),
        })
    }
3301
3302 fn repository_for_request(
3303 this: &Entity<Self>,
3304 id: RepositoryId,
3305 cx: &mut AsyncApp,
3306 ) -> Result<Entity<Repository>> {
3307 this.read_with(cx, |this, _| {
3308 this.repositories
3309 .get(&id)
3310 .context("missing repository handle")
3311 .cloned()
3312 })
3313 }
3314
3315 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3316 self.repositories
3317 .iter()
3318 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3319 .collect()
3320 }
3321
    /// Groups a batch of updated worktree entries by the repository that
    /// contains them, translating each absolute path into a repo-relative
    /// [`RepoPath`]. The matching work is fanned out on the background
    /// executor, one task per repository.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        // Sort the entry paths so each repository's matches form one
        // contiguous range that can be found via binary search below.
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sorting work directories means that, when iterated in reverse,
            // nested (more deeply rooted) repositories are visited first.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the sorted index so duplicates can be
                        // detected across repositories afterwards.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to it's innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3401}
3402
impl BufferGitState {
    /// Creates an empty per-buffer git state; diff entities, base texts, and
    /// conflict data are filled in lazily as they are opened or recalculated.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Watch channel holding `true` while a diff recalculation is in
            // flight; observed by `wait_for_recalculation`.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }

    /// Records the buffer's new language and schedules a diff recalculation
    /// so the diff views pick up the new syntax highlighting.
    #[ztracing::instrument(skip_all)]
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Re-scans the buffer for git conflict markers on a background task and
    /// applies the new snapshot to the associated [`ConflictSet`].
    ///
    /// The returned receiver resolves once the updated snapshot has been
    /// applied. If there is no live conflict set, or it currently records no
    /// conflicts, the sender is dropped immediately and awaiting the receiver
    /// yields `Canceled` instead.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only bother reparsing when the previous parse found conflicts.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            // Replacing the task cancels any previous in-flight reparse.
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff against the old snapshot off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Resolve every waiter that accumulated since the last
                    // completed reparse.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }

    /// The unstaged (index vs. buffer) diff, if it is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// The uncommitted (HEAD vs. buffer) diff, if it is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// The diff against a specific commit oid, if it is still alive.
    fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
        self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
    }

    /// Translates an `UpdateDiffBases` message (sent by the host) into a
    /// [`DiffBasesChange`] and triggers a diff recalculation.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        // Unknown modes (e.g. from a newer peer) are ignored.
        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }

    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves once it (and any follow-up recalculations) have finished;
    /// returns `None` when the state is already quiescent.
    pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
        if *self.recalculating_tx.borrow() {
            let mut rx = self.recalculating_tx.subscribe();
            Some(async move {
                loop {
                    let is_recalculating = rx.recv().await;
                    if is_recalculating != Some(true) {
                        break;
                    }
                }
            })
        } else {
            None
        }
    }

    /// Stores new HEAD/index base texts (normalizing line endings) and kicks
    /// off a diff recalculation. A base of `None` means "file absent".
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: Option<DiffBasesChange>,
        cx: &mut Context<Self>,
    ) {
        match diff_bases_change {
            Some(DiffBasesChange::SetIndex(index)) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetHead(head)) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            Some(DiffBasesChange::SetBoth(text)) => {
                // Index matches HEAD: share one Arc for both, which also lets
                // `recalculate_diffs` detect the match via pointer equality.
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::from(text.as_str())
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetEach { index, head }) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            None => {}
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged, uncommitted, and per-oid diffs for the buffer
    /// on a spawned task, then applies the new snapshots in a fixed order
    /// (unstaged first, so the uncommitted diff can reference its changes).
    ///
    /// The run is aborted if hunk stage/unstage operations happen while it is
    /// in flight, to avoid clobbering still-pending index state.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality suffices here: `diff_bases_changed` stores the
        // same Arc in both fields when index and HEAD are known to match.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Prune dropped oid diffs along with their cached base texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        // Replacing the task cancels any previous in-flight recalculation.
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            // NOTE(review): the Some(false)/Some(true) flag
                            // distinguishes the index vs. HEAD base here —
                            // confirm against `update_diff`'s signature.
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When index == HEAD, reuse the freshly computed unstaged
                // snapshot instead of diffing the same base text twice.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot(new_oid_diff, &buffer, cx)
                    })
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and signal waiters that we're done.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
}
3787
3788fn make_remote_delegate(
3789 this: Entity<GitStore>,
3790 project_id: u64,
3791 repository_id: RepositoryId,
3792 askpass_id: u64,
3793 cx: &mut AsyncApp,
3794) -> AskPassDelegate {
3795 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3796 this.update(cx, |this, cx| {
3797 let Some((client, _)) = this.downstream_client() else {
3798 return;
3799 };
3800 let response = client.request(proto::AskPassRequest {
3801 project_id,
3802 repository_id: repository_id.to_proto(),
3803 askpass_id,
3804 prompt,
3805 });
3806 cx.spawn(async move |_, _| {
3807 let mut response = response.await?.response;
3808 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3809 .ok();
3810 response.zeroize();
3811 anyhow::Ok(())
3812 })
3813 .detach_and_log_err(cx);
3814 });
3815 })
3816}
3817
3818impl RepositoryId {
3819 pub fn to_proto(self) -> u64 {
3820 self.0
3821 }
3822
3823 pub fn from_proto(id: u64) -> Self {
3824 RepositoryId(id)
3825 }
3826}
3827
3828impl RepositorySnapshot {
3829 fn empty(
3830 id: RepositoryId,
3831 work_directory_abs_path: Arc<Path>,
3832 original_repo_abs_path: Option<Arc<Path>>,
3833 path_style: PathStyle,
3834 ) -> Self {
3835 Self {
3836 id,
3837 statuses_by_path: Default::default(),
3838 original_repo_abs_path: original_repo_abs_path
3839 .unwrap_or_else(|| work_directory_abs_path.clone()),
3840 work_directory_abs_path,
3841 branch: None,
3842 branch_list: Arc::from([]),
3843 head_commit: None,
3844 scan_id: 0,
3845 merge: Default::default(),
3846 remote_origin_url: None,
3847 remote_upstream_url: None,
3848 stash_entries: Default::default(),
3849 linked_worktrees: Arc::from([]),
3850 path_style,
3851 }
3852 }
3853
3854 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3855 proto::UpdateRepository {
3856 branch_summary: self.branch.as_ref().map(branch_to_proto),
3857 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3858 updated_statuses: self
3859 .statuses_by_path
3860 .iter()
3861 .map(|entry| entry.to_proto())
3862 .collect(),
3863 removed_statuses: Default::default(),
3864 current_merge_conflicts: self
3865 .merge
3866 .merge_heads_by_conflicted_path
3867 .iter()
3868 .map(|(repo_path, _)| repo_path.to_proto())
3869 .collect(),
3870 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3871 project_id,
3872 id: self.id.to_proto(),
3873 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3874 entry_ids: vec![self.id.to_proto()],
3875 scan_id: self.scan_id,
3876 is_last_update: true,
3877 stash_entries: self
3878 .stash_entries
3879 .entries
3880 .iter()
3881 .map(stash_to_proto)
3882 .collect(),
3883 remote_upstream_url: self.remote_upstream_url.clone(),
3884 remote_origin_url: self.remote_origin_url.clone(),
3885 original_repo_abs_path: Some(
3886 self.original_repo_abs_path.to_string_lossy().into_owned(),
3887 ),
3888 linked_worktrees: self
3889 .linked_worktrees
3890 .iter()
3891 .map(worktree_to_proto)
3892 .collect(),
3893 }
3894 }
3895
3896 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3897 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3898 let mut removed_statuses: Vec<String> = Vec::new();
3899
3900 let mut new_statuses = self.statuses_by_path.iter().peekable();
3901 let mut old_statuses = old.statuses_by_path.iter().peekable();
3902
3903 let mut current_new_entry = new_statuses.next();
3904 let mut current_old_entry = old_statuses.next();
3905 loop {
3906 match (current_new_entry, current_old_entry) {
3907 (Some(new_entry), Some(old_entry)) => {
3908 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3909 Ordering::Less => {
3910 updated_statuses.push(new_entry.to_proto());
3911 current_new_entry = new_statuses.next();
3912 }
3913 Ordering::Equal => {
3914 if new_entry.status != old_entry.status
3915 || new_entry.diff_stat != old_entry.diff_stat
3916 {
3917 updated_statuses.push(new_entry.to_proto());
3918 }
3919 current_old_entry = old_statuses.next();
3920 current_new_entry = new_statuses.next();
3921 }
3922 Ordering::Greater => {
3923 removed_statuses.push(old_entry.repo_path.to_proto());
3924 current_old_entry = old_statuses.next();
3925 }
3926 }
3927 }
3928 (None, Some(old_entry)) => {
3929 removed_statuses.push(old_entry.repo_path.to_proto());
3930 current_old_entry = old_statuses.next();
3931 }
3932 (Some(new_entry), None) => {
3933 updated_statuses.push(new_entry.to_proto());
3934 current_new_entry = new_statuses.next();
3935 }
3936 (None, None) => break,
3937 }
3938 }
3939
3940 proto::UpdateRepository {
3941 branch_summary: self.branch.as_ref().map(branch_to_proto),
3942 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3943 updated_statuses,
3944 removed_statuses,
3945 current_merge_conflicts: self
3946 .merge
3947 .merge_heads_by_conflicted_path
3948 .iter()
3949 .map(|(path, _)| path.to_proto())
3950 .collect(),
3951 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3952 project_id,
3953 id: self.id.to_proto(),
3954 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3955 entry_ids: vec![],
3956 scan_id: self.scan_id,
3957 is_last_update: true,
3958 stash_entries: self
3959 .stash_entries
3960 .entries
3961 .iter()
3962 .map(stash_to_proto)
3963 .collect(),
3964 remote_upstream_url: self.remote_upstream_url.clone(),
3965 remote_origin_url: self.remote_origin_url.clone(),
3966 original_repo_abs_path: Some(
3967 self.original_repo_abs_path.to_string_lossy().into_owned(),
3968 ),
3969 linked_worktrees: self
3970 .linked_worktrees
3971 .iter()
3972 .map(worktree_to_proto)
3973 .collect(),
3974 }
3975 }
3976
3977 /// The main worktree is the original checkout that other worktrees were
3978 /// created from.
3979 ///
3980 /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
3981 /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
3982 ///
3983 /// Submodules also return `true` here, since they are not linked worktrees.
3984 pub fn is_main_worktree(&self) -> bool {
3985 self.work_directory_abs_path == self.original_repo_abs_path
3986 }
3987
3988 /// Returns true if this repository is a linked worktree, that is, one that
3989 /// was created from another worktree.
3990 ///
3991 /// Returns `false` for both the main worktree and submodules.
3992 pub fn is_linked_worktree(&self) -> bool {
3993 !self.is_main_worktree()
3994 }
3995
3996 pub fn linked_worktrees(&self) -> &[GitWorktree] {
3997 &self.linked_worktrees
3998 }
3999
4000 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
4001 self.statuses_by_path.iter().cloned()
4002 }
4003
4004 pub fn status_summary(&self) -> GitSummary {
4005 self.statuses_by_path.summary().item_summary
4006 }
4007
4008 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
4009 self.statuses_by_path
4010 .get(&PathKey(path.as_ref().clone()), ())
4011 .cloned()
4012 }
4013
4014 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4015 self.statuses_by_path
4016 .get(&PathKey(path.as_ref().clone()), ())
4017 .and_then(|entry| entry.diff_stat)
4018 }
4019
4020 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
4021 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
4022 }
4023
4024 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
4025 self.path_style
4026 .join(&self.work_directory_abs_path, repo_path.as_std_path())
4027 .unwrap()
4028 .into()
4029 }
4030
4031 #[inline]
4032 fn abs_path_to_repo_path_inner(
4033 work_directory_abs_path: &Path,
4034 abs_path: &Path,
4035 path_style: PathStyle,
4036 ) -> Option<RepoPath> {
4037 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
4038 Some(RepoPath::from_rel_path(&rel_path))
4039 }
4040
4041 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
4042 self.merge
4043 .merge_heads_by_conflicted_path
4044 .contains_key(repo_path)
4045 }
4046
4047 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
4048 let had_conflict_on_last_merge_head_change = self
4049 .merge
4050 .merge_heads_by_conflicted_path
4051 .contains_key(repo_path);
4052 let has_conflict_currently = self
4053 .status_for_path(repo_path)
4054 .is_some_and(|entry| entry.status.is_conflicted());
4055 had_conflict_on_last_merge_head_change || has_conflict_currently
4056 }
4057
4058 /// This is the name that will be displayed in the repository selector for this repository.
4059 pub fn display_name(&self) -> SharedString {
4060 self.work_directory_abs_path
4061 .file_name()
4062 .unwrap_or_default()
4063 .to_string_lossy()
4064 .to_string()
4065 .into()
4066 }
4067}
4068
4069pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
4070 proto::StashEntry {
4071 oid: entry.oid.as_bytes().to_vec(),
4072 message: entry.message.clone(),
4073 branch: entry.branch.clone(),
4074 index: entry.index as u64,
4075 timestamp: entry.timestamp,
4076 }
4077}
4078
4079pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
4080 Ok(StashEntry {
4081 oid: Oid::from_bytes(&entry.oid)?,
4082 message: entry.message.clone(),
4083 index: entry.index as usize,
4084 branch: entry.branch.clone(),
4085 timestamp: entry.timestamp,
4086 })
4087}
4088
4089impl MergeDetails {
4090 async fn update(
4091 &mut self,
4092 backend: &Arc<dyn GitRepository>,
4093 current_conflicted_paths: Vec<RepoPath>,
4094 ) -> Result<bool> {
4095 log::debug!("load merge details");
4096 self.message = backend.merge_message().await.map(SharedString::from);
4097 let heads = backend
4098 .revparse_batch(vec![
4099 "MERGE_HEAD".into(),
4100 "CHERRY_PICK_HEAD".into(),
4101 "REBASE_HEAD".into(),
4102 "REVERT_HEAD".into(),
4103 "APPLY_HEAD".into(),
4104 ])
4105 .await
4106 .log_err()
4107 .unwrap_or_default()
4108 .into_iter()
4109 .map(|opt| opt.map(SharedString::from))
4110 .collect::<Vec<_>>();
4111
4112 let mut conflicts_changed = false;
4113
4114 // Record the merge state for newly conflicted paths
4115 for path in ¤t_conflicted_paths {
4116 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
4117 conflicts_changed = true;
4118 self.merge_heads_by_conflicted_path
4119 .insert(path.clone(), heads.clone());
4120 }
4121 }
4122
4123 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
4124 self.merge_heads_by_conflicted_path
4125 .retain(|path, old_merge_heads| {
4126 let keep = current_conflicted_paths.contains(path)
4127 || (old_merge_heads == &heads
4128 && old_merge_heads.iter().any(|head| head.is_some()));
4129 if !keep {
4130 conflicts_changed = true;
4131 }
4132 keep
4133 });
4134
4135 Ok(conflicts_changed)
4136 }
4137}
4138
4139impl Repository {
4140 pub fn is_trusted(&self) -> bool {
4141 match self.repository_state.peek() {
4142 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
4143 _ => false,
4144 }
4145 }
4146
4147 pub fn snapshot(&self) -> RepositorySnapshot {
4148 self.snapshot.clone()
4149 }
4150
4151 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
4152 self.pending_ops.iter().cloned()
4153 }
4154
4155 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
4156 self.pending_ops.summary().clone()
4157 }
4158
4159 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
4160 self.pending_ops
4161 .get(&PathKey(path.as_ref().clone()), ())
4162 .cloned()
4163 }
4164
    /// Constructs a repository backed by a git repository on the local
    /// filesystem.
    ///
    /// The backend is initialized asynchronously: `repository_state` resolves
    /// once `LocalRepositoryState::new` completes (errors are stringified so
    /// the shared future's value is cloneable). The worker is spawned
    /// immediately, so jobs can be queued before initialization finishes.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // Kick off backend initialization. `shared()` lets both the worker and
        // the wrapped `repository_state` below await the same result.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Stringify so the shared future's error type is `Clone`.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        // Wrap the local state in the `RepositoryState` enum for consumers.
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached graph data on history-affecting events; the
        // `scan_id > 1` check skips invalidation during the initial scan.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                if this.scan_id > 1 {
                    // Only drop `LogSource::All` entries; other sources keep
                    // their caches when the stash changes.
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4238
    /// Constructs a repository that proxies a git repository on a remote host.
    ///
    /// All git operations are forwarded over `client` using `project_id`.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike `local`, the state is available immediately — wrap it in an
        // already-resolved shared task to match the field's type.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4276
4277 pub fn git_store(&self) -> Option<Entity<GitStore>> {
4278 self.git_store.upgrade()
4279 }
4280
    /// Recomputes the diff bases (index and HEAD base texts) for every open
    /// buffer belonging to this repository, after the on-disk git state may
    /// have changed.
    ///
    /// Runs as a keyed job (`GitJobKey::ReloadBufferDiffBases`) on the
    /// repository worker, in three phases: a foreground pass collecting the
    /// buffers and their currently-held base texts, a background pass loading
    /// fresh texts from git and computing the minimal change per buffer, and a
    /// final foreground pass applying the changes and mirroring them to any
    /// downstream client. Only meaningful for local repositories; for remote
    /// ones the job logs an error and returns.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (foreground): for each tracked buffer that maps into
                // this repository, capture its repo path plus the index/HEAD
                // texts currently held — `None` entries mean that diff kind is
                // not in use and need not be reloaded.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Phase 2 (background): load the fresh base texts and diff them
                // against the held ones to produce the minimal
                // `DiffBasesChange` per buffer (`None` when nothing changed).
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // When both changed to the same text,
                                            // collapse into a single `SetBoth`.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (foreground): apply each change to the buffer's diff
                // state, and forward it to the downstream client, if any.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4440
4441 pub fn send_job<F, Fut, R>(
4442 &mut self,
4443 status: Option<SharedString>,
4444 job: F,
4445 ) -> oneshot::Receiver<R>
4446 where
4447 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
4448 Fut: Future<Output = R> + 'static,
4449 R: Send + 'static,
4450 {
4451 self.send_keyed_job(None, status, job)
4452 }
4453
    /// Enqueues a job on the repository's worker queue.
    ///
    /// `key` identifies the job to the worker (presumably so related jobs can
    /// be coalesced or serialized — the worker loop is defined elsewhere;
    /// confirm there). When `status` is set, the job is recorded in
    /// `active_jobs` (with its start time and message) for the duration of its
    /// run, and observers are notified on both insert and removal. The
    /// returned receiver yields the job's result; it is canceled if the job or
    /// worker is dropped.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job as active before running it, so the
                        // UI can surface in-flight git operations.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Unregister regardless of the job's outcome.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may already be dropped; ignore send errors.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4503
4504 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4505 let Some(git_store) = self.git_store.upgrade() else {
4506 return;
4507 };
4508 let entity = cx.entity();
4509 git_store.update(cx, |git_store, cx| {
4510 let Some((&id, _)) = git_store
4511 .repositories
4512 .iter()
4513 .find(|(_, handle)| *handle == &entity)
4514 else {
4515 return;
4516 };
4517 git_store.active_repo_id = Some(id);
4518 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4519 });
4520 }
4521
4522 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4523 self.snapshot.status()
4524 }
4525
4526 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4527 self.snapshot.diff_stat_for_path(path)
4528 }
4529
4530 pub fn cached_stash(&self) -> GitStash {
4531 self.snapshot.stash_entries.clone()
4532 }
4533
4534 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4535 let git_store = self.git_store.upgrade()?;
4536 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4537 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4538 let abs_path = SanitizedPath::new(&abs_path);
4539 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4540 Some(ProjectPath {
4541 worktree_id: worktree.read(cx).id(),
4542 path: relative_path,
4543 })
4544 }
4545
4546 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4547 let git_store = self.git_store.upgrade()?;
4548 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4549 let abs_path = worktree_store.absolutize(path, cx)?;
4550 self.snapshot.abs_path_to_repo_path(&abs_path)
4551 }
4552
4553 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4554 other
4555 .read(cx)
4556 .snapshot
4557 .work_directory_abs_path
4558 .starts_with(&self.snapshot.work_directory_abs_path)
4559 }
4560
    /// Returns the buffer used for composing commit messages, creating it on
    /// first use and caching it in `commit_message_buffer`.
    ///
    /// Local repositories create a fresh buffer via
    /// `open_local_commit_buffer`; remote ones ask the host to open the buffer
    /// over RPC, wait for it to replicate, and tag it with the "Git Commit"
    /// language when a registry is provided.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the previously opened buffer.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait for the host's buffer to be replicated locally.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache for subsequent calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4613
4614 fn open_local_commit_buffer(
4615 language_registry: Option<Arc<LanguageRegistry>>,
4616 buffer_store: Entity<BufferStore>,
4617 cx: &mut Context<Self>,
4618 ) -> Task<Result<Entity<Buffer>>> {
4619 cx.spawn(async move |repository, cx| {
4620 let git_commit_language = match language_registry {
4621 Some(language_registry) => {
4622 Some(language_registry.language_for_name("Git Commit").await?)
4623 }
4624 None => None,
4625 };
4626 let buffer = buffer_store
4627 .update(cx, |buffer_store, cx| {
4628 buffer_store.create_buffer(git_commit_language, false, cx)
4629 })
4630 .await?;
4631
4632 repository.update(cx, |repository, _| {
4633 repository.commit_message_buffer = Some(buffer.clone());
4634 })?;
4635 Ok(buffer)
4636 })
4637 }
4638
    /// Restores the given paths to their contents at `commit`
    /// (`git checkout <commit> -- <paths>`), tracking a pending `Reverted`
    /// status for those paths while the operation runs.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                // Local: run the checkout directly via the backend.
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote: forward the request to the host.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4693
    /// Resets HEAD to `commit` with the given mode.
    ///
    /// The exhaustive match below implies `ResetMode` has exactly `Soft` and
    /// `Mixed` variants, mapped one-to-one onto the remote protocol.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4727
    /// Loads the details (sha, message, author, timestamp) of a single commit,
    /// either from the local backend or over RPC for remote repositories.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4755
    /// Loads the full diff of a single commit (per-file old/new texts), either
    /// from the local backend or over RPC for remote repositories.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    // Deserialize each file; an invalid path fails the whole diff.
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4791
4792 pub fn file_history(
4793 &mut self,
4794 path: RepoPath,
4795 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4796 self.file_history_paginated(path, 0, None)
4797 }
4798
    /// Loads a page of commit history for `path`, skipping the first `skip`
    /// entries and returning at most `limit` entries (all remaining when
    /// `None`).
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4840
4841 pub fn get_graph_data(
4842 &self,
4843 log_source: LogSource,
4844 log_order: LogOrder,
4845 ) -> Option<&InitialGitGraphData> {
4846 self.initial_graph_data.get(&(log_source, log_order))
4847 }
4848
4849 pub fn search_commits(
4850 &mut self,
4851 log_source: LogSource,
4852 search_args: SearchCommitArgs,
4853 request_tx: smol::channel::Sender<Oid>,
4854 cx: &mut Context<Self>,
4855 ) {
4856 let repository_state = self.repository_state.clone();
4857
4858 cx.background_spawn(async move {
4859 let repo_state = repository_state.await;
4860
4861 match repo_state {
4862 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4863 backend
4864 .search_commits(log_source, search_args, request_tx)
4865 .await
4866 .log_err();
4867 }
4868 Ok(RepositoryState::Remote(_)) => {}
4869 Err(_) => {}
4870 };
4871 })
4872 .detach();
4873 }
4874
    /// Returns the slice of cached graph commits covering `range`, spawning a
    /// background fetch on first access for this `(log_source, log_order)`
    /// pair.
    ///
    /// The range is clamped to the data loaded so far; `is_loading` is true
    /// until the fetch task completes, and `error` carries any fetch failure.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record a fetch failure on the cache entry so callers can
                    // surface it via `GraphDataResponse::error`.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the data available so far (an empty
        // cache yields the empty slice [0..0]).
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4944
    /// Streams the initial commit graph for a local repository.
    ///
    /// The backend sends batches of commit data over a channel; each batch is
    /// appended to the cached `initial_graph_data` entry (keeping the
    /// oid-to-index map in sync) and a `CountUpdated` event is emitted so
    /// observers can re-render.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // Drive the backend's graph traversal on the background executor.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                // Append this batch to the existing cache entry, if any.
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Channel closed: the traversal finished or failed — propagate its result.
        task.await?;
        Ok(())
    }
4998
    /// Requests detailed data for the commit `sha` and returns its current
    /// load state.
    ///
    /// On a cache miss the request is forwarded to the graph-commit handler
    /// task (lazily opening it when closed). Until data arrives, callers see
    /// `CommitDataState::Loading`.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark as loading when the request was actually queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    // Start the handler; the request is retried on a later call
                    // once it is open.
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
5019
    /// Spawns the machinery that reads commit data on demand for the graph.
    ///
    /// Two tasks cooperate over channels: a background task owns the backend's
    /// commit-data reader and services oid requests, and a foreground task
    /// copies each result into `self.commit_data` and notifies observers. The
    /// handler transitions `Closed -> Starting -> Open`, returning to `Closed`
    /// when the entity is dropped, the channels disconnect, or the reader
    /// idles out (10s without a request).
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        // Foreground: apply each result to the entity and notify observers.
        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    // The entity is gone; stop processing results.
                    break;
                }
            }

            // Mark the handler closed so future requests can restart it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        // Background: service read requests until disconnected or idle.
        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Shut the reader down after 10 seconds without a request.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Dropping the sender lets the foreground task drain, exit, and
            // mark the handler closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
5110
5111 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
5112 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
5113 }
5114
    /// Queues saves for any open, dirty buffers among `entries` whose files
    /// still exist on disk, returning the save tasks for the caller to await.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    // Only save buffers that are open, still backed by a file
                    // on disk, and that actually have unsaved edits.
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
5141
5142 pub fn stage_entries(
5143 &mut self,
5144 entries: Vec<RepoPath>,
5145 cx: &mut Context<Self>,
5146 ) -> Task<anyhow::Result<()>> {
5147 self.stage_or_unstage_entries(true, entries, cx)
5148 }
5149
5150 pub fn unstage_entries(
5151 &mut self,
5152 entries: Vec<RepoPath>,
5153 cx: &mut Context<Self>,
5154 ) -> Task<anyhow::Result<()>> {
5155 self.stage_or_unstage_entries(false, entries, cx)
5156 }
5157
    /// Stages (`stage == true`) or unstages the given paths, after saving any
    /// open buffers for those paths so the index write reflects their latest
    /// contents. Open uncommitted diffs for affected buffers are updated
    /// optimistically while the git operation runs.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        // Save dirty buffers first so the index receives on-disk content.
        let save_tasks = self.save_buffers(&entries, cx);
        // Build a human-readable description of the equivalent git command.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        // Key the job on the affected paths so index writes for the same
        // paths are serialized.
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically mark all hunks of each affected
                            // buffer's uncommitted diff as (un)staged, and
                            // remember each diff's operation count so we can
                            // reconcile or roll back after the write below.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write, either against
                            // the local backend or via the collaboration host.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // On success, record that the index now reflects
                            // the counted hunk operations; on failure, clear
                            // the optimistic pending-hunk state.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5342
5343 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5344 let snapshot = self.snapshot.clone();
5345 let pending_ops = self.pending_ops.clone();
5346 let to_stage = cx.background_spawn(async move {
5347 snapshot
5348 .status()
5349 .filter_map(|entry| {
5350 if let Some(ops) =
5351 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5352 {
5353 if ops.staging() || ops.staged() {
5354 None
5355 } else {
5356 Some(entry.repo_path)
5357 }
5358 } else if entry.status.staging().is_fully_staged() {
5359 None
5360 } else {
5361 Some(entry.repo_path)
5362 }
5363 })
5364 .collect()
5365 });
5366
5367 cx.spawn(async move |this, cx| {
5368 let to_stage = to_stage.await;
5369 this.update(cx, |this, cx| {
5370 this.stage_or_unstage_entries(true, to_stage, cx)
5371 })?
5372 .await
5373 })
5374 }
5375
5376 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5377 let snapshot = self.snapshot.clone();
5378 let pending_ops = self.pending_ops.clone();
5379 let to_unstage = cx.background_spawn(async move {
5380 snapshot
5381 .status()
5382 .filter_map(|entry| {
5383 if let Some(ops) =
5384 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5385 {
5386 if !ops.staging() && !ops.staged() {
5387 None
5388 } else {
5389 Some(entry.repo_path)
5390 }
5391 } else if entry.status.staging().is_fully_unstaged() {
5392 None
5393 } else {
5394 Some(entry.repo_path)
5395 }
5396 })
5397 .collect()
5398 });
5399
5400 cx.spawn(async move |this, cx| {
5401 let to_unstage = to_unstage.await;
5402 this.update(cx, |this, cx| {
5403 this.stage_or_unstage_entries(false, to_unstage, cx)
5404 })?
5405 .await
5406 })
5407 }
5408
5409 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5410 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5411
5412 self.stash_entries(to_stash, cx)
5413 }
5414
5415 pub fn stash_entries(
5416 &mut self,
5417 entries: Vec<RepoPath>,
5418 cx: &mut Context<Self>,
5419 ) -> Task<anyhow::Result<()>> {
5420 let id = self.id;
5421
5422 cx.spawn(async move |this, cx| {
5423 this.update(cx, |this, _| {
5424 this.send_job(None, move |git_repo, _cx| async move {
5425 match git_repo {
5426 RepositoryState::Local(LocalRepositoryState {
5427 backend,
5428 environment,
5429 ..
5430 }) => backend.stash_paths(entries, environment).await,
5431 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5432 client
5433 .request(proto::Stash {
5434 project_id: project_id.0,
5435 repository_id: id.to_proto(),
5436 paths: entries
5437 .into_iter()
5438 .map(|repo_path| repo_path.to_proto())
5439 .collect(),
5440 })
5441 .await?;
5442 Ok(())
5443 }
5444 }
5445 })
5446 })?
5447 .await??;
5448 Ok(())
5449 })
5450 }
5451
5452 pub fn stash_pop(
5453 &mut self,
5454 index: Option<usize>,
5455 cx: &mut Context<Self>,
5456 ) -> Task<anyhow::Result<()>> {
5457 let id = self.id;
5458 cx.spawn(async move |this, cx| {
5459 this.update(cx, |this, _| {
5460 this.send_job(None, move |git_repo, _cx| async move {
5461 match git_repo {
5462 RepositoryState::Local(LocalRepositoryState {
5463 backend,
5464 environment,
5465 ..
5466 }) => backend.stash_pop(index, environment).await,
5467 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5468 client
5469 .request(proto::StashPop {
5470 project_id: project_id.0,
5471 repository_id: id.to_proto(),
5472 stash_index: index.map(|i| i as u64),
5473 })
5474 .await
5475 .context("sending stash pop request")?;
5476 Ok(())
5477 }
5478 }
5479 })
5480 })?
5481 .await??;
5482 Ok(())
5483 })
5484 }
5485
5486 pub fn stash_apply(
5487 &mut self,
5488 index: Option<usize>,
5489 cx: &mut Context<Self>,
5490 ) -> Task<anyhow::Result<()>> {
5491 let id = self.id;
5492 cx.spawn(async move |this, cx| {
5493 this.update(cx, |this, _| {
5494 this.send_job(None, move |git_repo, _cx| async move {
5495 match git_repo {
5496 RepositoryState::Local(LocalRepositoryState {
5497 backend,
5498 environment,
5499 ..
5500 }) => backend.stash_apply(index, environment).await,
5501 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5502 client
5503 .request(proto::StashApply {
5504 project_id: project_id.0,
5505 repository_id: id.to_proto(),
5506 stash_index: index.map(|i| i as u64),
5507 })
5508 .await
5509 .context("sending stash apply request")?;
5510 Ok(())
5511 }
5512 }
5513 })
5514 })?
5515 .await??;
5516 Ok(())
5517 })
5518 }
5519
5520 pub fn stash_drop(
5521 &mut self,
5522 index: Option<usize>,
5523 cx: &mut Context<Self>,
5524 ) -> oneshot::Receiver<anyhow::Result<()>> {
5525 let id = self.id;
5526 let updates_tx = self
5527 .git_store()
5528 .and_then(|git_store| match &git_store.read(cx).state {
5529 GitStoreState::Local { downstream, .. } => downstream
5530 .as_ref()
5531 .map(|downstream| downstream.updates_tx.clone()),
5532 _ => None,
5533 });
5534 let this = cx.weak_entity();
5535 self.send_job(None, move |git_repo, mut cx| async move {
5536 match git_repo {
5537 RepositoryState::Local(LocalRepositoryState {
5538 backend,
5539 environment,
5540 ..
5541 }) => {
5542 // TODO would be nice to not have to do this manually
5543 let result = backend.stash_drop(index, environment).await;
5544 if result.is_ok()
5545 && let Ok(stash_entries) = backend.stash_entries().await
5546 {
5547 let snapshot = this.update(&mut cx, |this, cx| {
5548 this.snapshot.stash_entries = stash_entries;
5549 cx.emit(RepositoryEvent::StashEntriesChanged);
5550 this.snapshot.clone()
5551 })?;
5552 if let Some(updates_tx) = updates_tx {
5553 updates_tx
5554 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5555 .ok();
5556 }
5557 }
5558
5559 result
5560 }
5561 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5562 client
5563 .request(proto::StashDrop {
5564 project_id: project_id.0,
5565 repository_id: id.to_proto(),
5566 stash_index: index.map(|i| i as u64),
5567 })
5568 .await
5569 .context("sending stash pop request")?;
5570 Ok(())
5571 }
5572 }
5573 })
5574 }
5575
5576 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5577 let id = self.id;
5578 self.send_job(
5579 Some(format!("git hook {}", hook.as_str()).into()),
5580 move |git_repo, _cx| async move {
5581 match git_repo {
5582 RepositoryState::Local(LocalRepositoryState {
5583 backend,
5584 environment,
5585 ..
5586 }) => backend.run_hook(hook, environment.clone()).await,
5587 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5588 client
5589 .request(proto::RunGitHook {
5590 project_id: project_id.0,
5591 repository_id: id.to_proto(),
5592 hook: hook.to_proto(),
5593 })
5594 .await?;
5595
5596 Ok(())
5597 }
5598 }
5599 },
5600 )
5601 }
5602
    /// Creates a commit with the given message, running the pre-commit hook
    /// first. `name_and_email` overrides the author identity when provided;
    /// `askpass` handles any interactive prompts.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Enqueue the pre-commit hook now; the commit job below waits on it.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort the commit if the pre-commit hook failed or was canceled.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate under a fresh id so the
                    // host can route prompts back to us; unregister when done.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5658
5659 pub fn fetch(
5660 &mut self,
5661 fetch_options: FetchOptions,
5662 askpass: AskPassDelegate,
5663 _cx: &mut App,
5664 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5665 let askpass_delegates = self.askpass_delegates.clone();
5666 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5667 let id = self.id;
5668
5669 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
5670 match git_repo {
5671 RepositoryState::Local(LocalRepositoryState {
5672 backend,
5673 environment,
5674 ..
5675 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
5676 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5677 askpass_delegates.lock().insert(askpass_id, askpass);
5678 let _defer = util::defer(|| {
5679 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5680 debug_assert!(askpass_delegate.is_some());
5681 });
5682
5683 let response = client
5684 .request(proto::Fetch {
5685 project_id: project_id.0,
5686 repository_id: id.to_proto(),
5687 askpass_id,
5688 remote: fetch_options.to_proto(),
5689 })
5690 .await?;
5691
5692 Ok(RemoteCommandOutput {
5693 stdout: response.stdout,
5694 stderr: response.stderr,
5695 })
5696 }
5697 }
5698 })
5699 }
5700
    /// Pushes `branch` to `remote_branch` on `remote`. On success (local
    /// repositories), re-reads the branch list to refresh the cached HEAD
    /// branch and forwards the updated snapshot downstream.
    pub fn push(
        &mut self,
        branch: SharedString,
        remote_branch: SharedString,
        remote: SharedString,
        options: Option<PushOptions>,
        askpass: AskPassDelegate,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Flag text shown in the human-readable job status below.
        let args = options
            .map(|option| match option {
                PushOptions::SetUpstream => " --set-upstream",
                PushOptions::Force => " --force-with-lease",
            })
            .unwrap_or("");

        // Channel for forwarding repository updates to downstream
        // collaborators when we host a shared local project.
        let updates_tx = self
            .git_store()
            .and_then(|git_store| match &git_store.read(cx).state {
                GitStoreState::Local { downstream, .. } => downstream
                    .as_ref()
                    .map(|downstream| downstream.updates_tx.clone()),
                _ => None,
            });

        let this = cx.weak_entity();
        self.send_job(
            Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
            move |git_repo, mut cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => {
                        let result = backend
                            .push(
                                branch.to_string(),
                                remote_branch.to_string(),
                                remote.to_string(),
                                options,
                                askpass,
                                environment.clone(),
                                cx.clone(),
                            )
                            .await;
                        // TODO would be nice to not have to do this manually
                        if result.is_ok() {
                            // Refresh the cached HEAD branch from the backend
                            // (e.g. upstream tracking may have changed).
                            let branches = backend.branches().await?;
                            let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after scan is {branch:?}");
                            let snapshot = this.update(&mut cx, |this, cx| {
                                this.snapshot.branch = branch;
                                cx.emit(RepositoryEvent::HeadChanged);
                                this.snapshot.clone()
                            })?;
                            if let Some(updates_tx) = updates_tx {
                                updates_tx
                                    .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                                    .ok();
                            }
                        }
                        result
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Register the askpass delegate so the host can route
                        // credential prompts back to this client.
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Push {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_branch_name: remote_branch.to_string(),
                                remote_name: remote.to_string(),
                                options: options.map(|options| match options {
                                    PushOptions::Force => proto::push::PushOptions::Force,
                                    PushOptions::SetUpstream => {
                                        proto::push::PushOptions::SetUpstream
                                    }
                                }
                                    as i32),
                            })
                            .await?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }
5802
5803 pub fn pull(
5804 &mut self,
5805 branch: Option<SharedString>,
5806 remote: SharedString,
5807 rebase: bool,
5808 askpass: AskPassDelegate,
5809 _cx: &mut App,
5810 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5811 let askpass_delegates = self.askpass_delegates.clone();
5812 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5813 let id = self.id;
5814
5815 let mut status = "git pull".to_string();
5816 if rebase {
5817 status.push_str(" --rebase");
5818 }
5819 status.push_str(&format!(" {}", remote));
5820 if let Some(b) = &branch {
5821 status.push_str(&format!(" {}", b));
5822 }
5823
5824 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5825 match git_repo {
5826 RepositoryState::Local(LocalRepositoryState {
5827 backend,
5828 environment,
5829 ..
5830 }) => {
5831 backend
5832 .pull(
5833 branch.as_ref().map(|b| b.to_string()),
5834 remote.to_string(),
5835 rebase,
5836 askpass,
5837 environment.clone(),
5838 cx,
5839 )
5840 .await
5841 }
5842 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5843 askpass_delegates.lock().insert(askpass_id, askpass);
5844 let _defer = util::defer(|| {
5845 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5846 debug_assert!(askpass_delegate.is_some());
5847 });
5848 let response = client
5849 .request(proto::Pull {
5850 project_id: project_id.0,
5851 repository_id: id.to_proto(),
5852 askpass_id,
5853 rebase,
5854 branch_name: branch.as_ref().map(|b| b.to_string()),
5855 remote_name: remote.to_string(),
5856 })
5857 .await?;
5858
5859 Ok(RemoteCommandOutput {
5860 stdout: response.stdout,
5861 stderr: response.stderr,
5862 })
5863 }
5864 }
5865 })
5866 }
5867
    /// Writes `content` as the index text for `path`, keyed on the path so
    /// writes to the same index entry are serialized. When
    /// `hunk_staging_operation_count` is provided, records it on the buffer's
    /// diff state after the write completes.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Mirror the working copy's executable bit into the
                        // index entry; a missing or unreadable file counts as
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Record which hunk-staging operation this index write
                    // corresponds to on the buffer's diff state, so later
                    // updates can tell whether the index already reflects it.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5946
5947 pub fn create_remote(
5948 &mut self,
5949 remote_name: String,
5950 remote_url: String,
5951 ) -> oneshot::Receiver<Result<()>> {
5952 let id = self.id;
5953 self.send_job(
5954 Some(format!("git remote add {remote_name} {remote_url}").into()),
5955 move |repo, _cx| async move {
5956 match repo {
5957 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5958 backend.create_remote(remote_name, remote_url).await
5959 }
5960 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5961 client
5962 .request(proto::GitCreateRemote {
5963 project_id: project_id.0,
5964 repository_id: id.to_proto(),
5965 remote_name,
5966 remote_url,
5967 })
5968 .await?;
5969
5970 Ok(())
5971 }
5972 }
5973 },
5974 )
5975 }
5976
5977 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5978 let id = self.id;
5979 self.send_job(
5980 Some(format!("git remove remote {remote_name}").into()),
5981 move |repo, _cx| async move {
5982 match repo {
5983 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5984 backend.remove_remote(remote_name).await
5985 }
5986 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5987 client
5988 .request(proto::GitRemoveRemote {
5989 project_id: project_id.0,
5990 repository_id: id.to_proto(),
5991 remote_name,
5992 })
5993 .await?;
5994
5995 Ok(())
5996 }
5997 }
5998 },
5999 )
6000 }
6001
6002 pub fn get_remotes(
6003 &mut self,
6004 branch_name: Option<String>,
6005 is_push: bool,
6006 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
6007 let id = self.id;
6008 self.send_job(None, move |repo, _cx| async move {
6009 match repo {
6010 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6011 let remote = if let Some(branch_name) = branch_name {
6012 if is_push {
6013 backend.get_push_remote(branch_name).await?
6014 } else {
6015 backend.get_branch_remote(branch_name).await?
6016 }
6017 } else {
6018 None
6019 };
6020
6021 match remote {
6022 Some(remote) => Ok(vec![remote]),
6023 None => backend.get_all_remotes().await,
6024 }
6025 }
6026 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6027 let response = client
6028 .request(proto::GetRemotes {
6029 project_id: project_id.0,
6030 repository_id: id.to_proto(),
6031 branch_name,
6032 is_push,
6033 })
6034 .await?;
6035
6036 let remotes = response
6037 .remotes
6038 .into_iter()
6039 .map(|remotes| Remote {
6040 name: remotes.name.into(),
6041 })
6042 .collect();
6043
6044 Ok(remotes)
6045 }
6046 }
6047 })
6048 }
6049
6050 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
6051 let id = self.id;
6052 self.send_job(None, move |repo, _| async move {
6053 match repo {
6054 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6055 backend.branches().await
6056 }
6057 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6058 let response = client
6059 .request(proto::GitGetBranches {
6060 project_id: project_id.0,
6061 repository_id: id.to_proto(),
6062 })
6063 .await?;
6064
6065 let branches = response
6066 .branches
6067 .into_iter()
6068 .map(|branch| proto_to_branch(&branch))
6069 .collect();
6070
6071 Ok(branches)
6072 }
6073 }
6074 })
6075 }
6076
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // The main checkout's working directory equals the repository's
        // original path; any other working directory is a linked worktree.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
6088
6089 pub fn path_for_new_linked_worktree(
6090 &self,
6091 branch_name: &str,
6092 worktree_directory_setting: &str,
6093 ) -> Result<PathBuf> {
6094 let original_repo = self.original_repo_abs_path.clone();
6095 let project_name = original_repo
6096 .file_name()
6097 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
6098 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
6099 Ok(directory.join(branch_name).join(project_name))
6100 }
6101
6102 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
6103 let id = self.id;
6104 self.send_job(None, move |repo, _| async move {
6105 match repo {
6106 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6107 backend.worktrees().await
6108 }
6109 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6110 let response = client
6111 .request(proto::GitGetWorktrees {
6112 project_id: project_id.0,
6113 repository_id: id.to_proto(),
6114 })
6115 .await?;
6116
6117 let worktrees = response
6118 .worktrees
6119 .into_iter()
6120 .map(|worktree| proto_to_worktree(&worktree))
6121 .collect();
6122
6123 Ok(worktrees)
6124 }
6125 }
6126 })
6127 }
6128
6129 pub fn create_worktree(
6130 &mut self,
6131 target: CreateWorktreeTarget,
6132 path: PathBuf,
6133 ) -> oneshot::Receiver<Result<()>> {
6134 let id = self.id;
6135 let job_description = match target.branch_name() {
6136 Some(branch_name) => format!("git worktree add: {branch_name}"),
6137 None => "git worktree add (detached)".to_string(),
6138 };
6139 self.send_job(Some(job_description.into()), move |repo, _cx| async move {
6140 match repo {
6141 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6142 backend.create_worktree(target, path).await
6143 }
6144 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6145 let (name, commit, use_existing_branch) = match target {
6146 CreateWorktreeTarget::ExistingBranch { branch_name } => {
6147 (Some(branch_name), None, true)
6148 }
6149 CreateWorktreeTarget::NewBranch {
6150 branch_name,
6151 base_sha,
6152 } => (Some(branch_name), base_sha, false),
6153 CreateWorktreeTarget::Detached { base_sha } => (None, base_sha, false),
6154 };
6155
6156 client
6157 .request(proto::GitCreateWorktree {
6158 project_id: project_id.0,
6159 repository_id: id.to_proto(),
6160 name: name.unwrap_or_default(),
6161 directory: path.to_string_lossy().to_string(),
6162 commit,
6163 use_existing_branch,
6164 })
6165 .await?;
6166
6167 Ok(())
6168 }
6169 }
6170 })
6171 }
6172
6173 pub fn create_worktree_detached(
6174 &mut self,
6175 path: PathBuf,
6176 commit: String,
6177 ) -> oneshot::Receiver<Result<()>> {
6178 self.create_worktree(
6179 CreateWorktreeTarget::Detached {
6180 base_sha: Some(commit),
6181 },
6182 path,
6183 )
6184 }
6185
6186 pub fn checkout_branch_in_worktree(
6187 &mut self,
6188 branch_name: String,
6189 worktree_path: PathBuf,
6190 create: bool,
6191 ) -> oneshot::Receiver<Result<()>> {
6192 let description = if create {
6193 format!("git checkout -b {branch_name}")
6194 } else {
6195 format!("git checkout {branch_name}")
6196 };
6197 self.send_job(Some(description.into()), move |repo, _cx| async move {
6198 match repo {
6199 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6200 backend
6201 .checkout_branch_in_worktree(branch_name, worktree_path, create)
6202 .await
6203 }
6204 RepositoryState::Remote(_) => {
6205 log::warn!("checkout_branch_in_worktree not supported for remote repositories");
6206 Ok(())
6207 }
6208 }
6209 })
6210 }
6211
6212 pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
6213 let id = self.id;
6214 self.send_job(None, move |repo, _cx| async move {
6215 match repo {
6216 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6217 Ok(backend.head_sha().await)
6218 }
6219 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6220 let response = client
6221 .request(proto::GitGetHeadSha {
6222 project_id: project_id.0,
6223 repository_id: id.to_proto(),
6224 })
6225 .await?;
6226
6227 Ok(response.sha)
6228 }
6229 }
6230 })
6231 }
6232
6233 fn edit_ref(
6234 &mut self,
6235 ref_name: String,
6236 commit: Option<String>,
6237 ) -> oneshot::Receiver<Result<()>> {
6238 let id = self.id;
6239 self.send_job(None, move |repo, _cx| async move {
6240 match repo {
6241 RepositoryState::Local(LocalRepositoryState { backend, .. }) => match commit {
6242 Some(commit) => backend.update_ref(ref_name, commit).await,
6243 None => backend.delete_ref(ref_name).await,
6244 },
6245 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6246 let action = match commit {
6247 Some(sha) => proto::git_edit_ref::Action::UpdateToCommit(sha),
6248 None => {
6249 proto::git_edit_ref::Action::Delete(proto::git_edit_ref::DeleteRef {})
6250 }
6251 };
6252 client
6253 .request(proto::GitEditRef {
6254 project_id: project_id.0,
6255 repository_id: id.to_proto(),
6256 ref_name,
6257 action: Some(action),
6258 })
6259 .await?;
6260 Ok(())
6261 }
6262 }
6263 })
6264 }
6265
    /// Points `ref_name` at `commit` (like `git update-ref <ref> <commit>`).
    pub fn update_ref(
        &mut self,
        ref_name: String,
        commit: String,
    ) -> oneshot::Receiver<Result<()>> {
        self.edit_ref(ref_name, Some(commit))
    }
6273
    /// Deletes `ref_name` (like `git update-ref -d <ref>`).
    pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
        self.edit_ref(ref_name, None)
    }
6277
6278 pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
6279 let id = self.id;
6280 self.send_job(None, move |repo, _cx| async move {
6281 match repo {
6282 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6283 backend.repair_worktrees().await
6284 }
6285 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6286 client
6287 .request(proto::GitRepairWorktrees {
6288 project_id: project_id.0,
6289 repository_id: id.to_proto(),
6290 })
6291 .await?;
6292 Ok(())
6293 }
6294 }
6295 })
6296 }
6297
6298 pub fn create_archive_checkpoint(&mut self) -> oneshot::Receiver<Result<(String, String)>> {
6299 let id = self.id;
6300 self.send_job(None, move |repo, _cx| async move {
6301 match repo {
6302 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6303 backend.create_archive_checkpoint().await
6304 }
6305 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6306 let response = client
6307 .request(proto::GitCreateArchiveCheckpoint {
6308 project_id: project_id.0,
6309 repository_id: id.to_proto(),
6310 })
6311 .await?;
6312 Ok((response.staged_commit_sha, response.unstaged_commit_sha))
6313 }
6314 }
6315 })
6316 }
6317
6318 pub fn restore_archive_checkpoint(
6319 &mut self,
6320 staged_sha: String,
6321 unstaged_sha: String,
6322 ) -> oneshot::Receiver<Result<()>> {
6323 let id = self.id;
6324 self.send_job(None, move |repo, _cx| async move {
6325 match repo {
6326 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6327 backend
6328 .restore_archive_checkpoint(staged_sha, unstaged_sha)
6329 .await
6330 }
6331 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6332 client
6333 .request(proto::GitRestoreArchiveCheckpoint {
6334 project_id: project_id.0,
6335 repository_id: id.to_proto(),
6336 staged_commit_sha: staged_sha,
6337 unstaged_commit_sha: unstaged_sha,
6338 })
6339 .await?;
6340 Ok(())
6341 }
6342 }
6343 })
6344 }
6345
6346 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
6347 let id = self.id;
6348 self.send_job(
6349 Some(format!("git worktree remove: {}", path.display()).into()),
6350 move |repo, _cx| async move {
6351 match repo {
6352 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6353 backend.remove_worktree(path, force).await
6354 }
6355 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6356 client
6357 .request(proto::GitRemoveWorktree {
6358 project_id: project_id.0,
6359 repository_id: id.to_proto(),
6360 path: path.to_string_lossy().to_string(),
6361 force,
6362 })
6363 .await?;
6364
6365 Ok(())
6366 }
6367 }
6368 },
6369 )
6370 }
6371
6372 pub fn rename_worktree(
6373 &mut self,
6374 old_path: PathBuf,
6375 new_path: PathBuf,
6376 ) -> oneshot::Receiver<Result<()>> {
6377 let id = self.id;
6378 self.send_job(
6379 Some(format!("git worktree move: {}", old_path.display()).into()),
6380 move |repo, _cx| async move {
6381 match repo {
6382 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6383 backend.rename_worktree(old_path, new_path).await
6384 }
6385 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6386 client
6387 .request(proto::GitRenameWorktree {
6388 project_id: project_id.0,
6389 repository_id: id.to_proto(),
6390 old_path: old_path.to_string_lossy().to_string(),
6391 new_path: new_path.to_string_lossy().to_string(),
6392 })
6393 .await?;
6394
6395 Ok(())
6396 }
6397 }
6398 },
6399 )
6400 }
6401
6402 pub fn default_branch(
6403 &mut self,
6404 include_remote_name: bool,
6405 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
6406 let id = self.id;
6407 self.send_job(None, move |repo, _| async move {
6408 match repo {
6409 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6410 backend.default_branch(include_remote_name).await
6411 }
6412 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6413 let response = client
6414 .request(proto::GetDefaultBranch {
6415 project_id: project_id.0,
6416 repository_id: id.to_proto(),
6417 })
6418 .await?;
6419
6420 anyhow::Ok(response.branch.map(SharedString::from))
6421 }
6422 }
6423 })
6424 }
6425
    /// Computes the set of files that differ between the two trees
    /// described by `diff_type`, with each file's [`TreeDiffStatus`].
    ///
    /// Locally this is answered by the git backend; on a remote project
    /// the request is proxied to the host and the wire entries are
    /// decoded back into a [`TreeDiff`].
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Decode each wire entry. Entries with a missing or
                    // unparsable oid/path are logged (via `log_err`) and
                    // dropped rather than failing the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    // Modified/Deleted carry the old blob's oid so
                                    // callers can load the previous contents.
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6485
6486 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6487 let id = self.id;
6488 self.send_job(None, move |repo, _cx| async move {
6489 match repo {
6490 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6491 backend.diff(diff_type).await
6492 }
6493 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6494 let (proto_diff_type, merge_base_ref) = match &diff_type {
6495 DiffType::HeadToIndex => {
6496 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6497 }
6498 DiffType::HeadToWorktree => {
6499 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6500 }
6501 DiffType::MergeBase { base_ref } => (
6502 proto::git_diff::DiffType::MergeBase.into(),
6503 Some(base_ref.to_string()),
6504 ),
6505 };
6506 let response = client
6507 .request(proto::GitDiff {
6508 project_id: project_id.0,
6509 repository_id: id.to_proto(),
6510 diff_type: proto_diff_type,
6511 merge_base_ref,
6512 })
6513 .await?;
6514
6515 Ok(response.diff)
6516 }
6517 }
6518 })
6519 }
6520
    /// Creates a new branch and switches to it (`git switch -c`),
    /// optionally starting from `base_branch`.
    ///
    /// NOTE(review): on the remote path, `base_branch` is not included in
    /// the `GitCreateBranch` request, so the host presumably branches from
    /// its current HEAD — confirm whether the protocol should carry the
    /// base ref.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        // Human-readable status message mirroring the equivalent CLI command.
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6551
6552 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6553 let id = self.id;
6554 self.send_job(
6555 Some(format!("git switch {branch_name}").into()),
6556 move |repo, _cx| async move {
6557 match repo {
6558 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6559 backend.change_branch(branch_name).await
6560 }
6561 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6562 client
6563 .request(proto::GitChangeBranch {
6564 project_id: project_id.0,
6565 repository_id: id.to_proto(),
6566 branch_name,
6567 })
6568 .await?;
6569
6570 Ok(())
6571 }
6572 }
6573 },
6574 )
6575 }
6576
6577 pub fn delete_branch(
6578 &mut self,
6579 is_remote: bool,
6580 branch_name: String,
6581 ) -> oneshot::Receiver<Result<()>> {
6582 let id = self.id;
6583 self.send_job(
6584 Some(
6585 format!(
6586 "git branch {} {}",
6587 if is_remote { "-dr" } else { "-d" },
6588 branch_name
6589 )
6590 .into(),
6591 ),
6592 move |repo, _cx| async move {
6593 match repo {
6594 RepositoryState::Local(state) => {
6595 state.backend.delete_branch(is_remote, branch_name).await
6596 }
6597 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6598 client
6599 .request(proto::GitDeleteBranch {
6600 project_id: project_id.0,
6601 repository_id: id.to_proto(),
6602 is_remote,
6603 branch_name,
6604 })
6605 .await?;
6606
6607 Ok(())
6608 }
6609 }
6610 },
6611 )
6612 }
6613
6614 pub fn rename_branch(
6615 &mut self,
6616 branch: String,
6617 new_name: String,
6618 ) -> oneshot::Receiver<Result<()>> {
6619 let id = self.id;
6620 self.send_job(
6621 Some(format!("git branch -m {branch} {new_name}").into()),
6622 move |repo, _cx| async move {
6623 match repo {
6624 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6625 backend.rename_branch(branch, new_name).await
6626 }
6627 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6628 client
6629 .request(proto::GitRenameBranch {
6630 project_id: project_id.0,
6631 repository_id: id.to_proto(),
6632 branch,
6633 new_name,
6634 })
6635 .await?;
6636
6637 Ok(())
6638 }
6639 }
6640 },
6641 )
6642 }
6643
6644 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6645 let id = self.id;
6646 self.send_job(None, move |repo, _cx| async move {
6647 match repo {
6648 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6649 backend.check_for_pushed_commit().await
6650 }
6651 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6652 let response = client
6653 .request(proto::CheckForPushedCommits {
6654 project_id: project_id.0,
6655 repository_id: id.to_proto(),
6656 })
6657 .await?;
6658
6659 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6660
6661 Ok(branches)
6662 }
6663 }
6664 })
6665 }
6666
6667 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6668 let id = self.id;
6669 self.send_job(None, move |repo, _cx| async move {
6670 match repo {
6671 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6672 backend.checkpoint().await
6673 }
6674 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6675 let response = client
6676 .request(proto::GitCreateCheckpoint {
6677 project_id: project_id.0,
6678 repository_id: id.to_proto(),
6679 })
6680 .await?;
6681
6682 Ok(GitRepositoryCheckpoint {
6683 commit_sha: Oid::from_bytes(&response.commit_sha)?,
6684 })
6685 }
6686 }
6687 })
6688 }
6689
6690 pub fn restore_checkpoint(
6691 &mut self,
6692 checkpoint: GitRepositoryCheckpoint,
6693 ) -> oneshot::Receiver<Result<()>> {
6694 let id = self.id;
6695 self.send_job(None, move |repo, _cx| async move {
6696 match repo {
6697 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6698 backend.restore_checkpoint(checkpoint).await
6699 }
6700 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6701 client
6702 .request(proto::GitRestoreCheckpoint {
6703 project_id: project_id.0,
6704 repository_id: id.to_proto(),
6705 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6706 })
6707 .await?;
6708 Ok(())
6709 }
6710 }
6711 })
6712 }
6713
    /// Applies an `UpdateRepository` message from the upstream (host)
    /// project to this downstream replica's snapshot, emitting a
    /// [`RepositoryEvent`] for each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch and HEAD: a single event covers a change to either.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to decode are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Apply per-path status removals and upserts as one batch of
        // sum-tree edits; undecodable paths/statuses are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only the final message of an update burst carries the new scan id.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6797
6798 pub fn compare_checkpoints(
6799 &mut self,
6800 left: GitRepositoryCheckpoint,
6801 right: GitRepositoryCheckpoint,
6802 ) -> oneshot::Receiver<Result<bool>> {
6803 let id = self.id;
6804 self.send_job(None, move |repo, _cx| async move {
6805 match repo {
6806 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6807 backend.compare_checkpoints(left, right).await
6808 }
6809 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6810 let response = client
6811 .request(proto::GitCompareCheckpoints {
6812 project_id: project_id.0,
6813 repository_id: id.to_proto(),
6814 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6815 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6816 })
6817 .await?;
6818 Ok(response.equal)
6819 }
6820 }
6821 })
6822 }
6823
6824 pub fn diff_checkpoints(
6825 &mut self,
6826 base_checkpoint: GitRepositoryCheckpoint,
6827 target_checkpoint: GitRepositoryCheckpoint,
6828 ) -> oneshot::Receiver<Result<String>> {
6829 let id = self.id;
6830 self.send_job(None, move |repo, _cx| async move {
6831 match repo {
6832 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6833 backend
6834 .diff_checkpoints(base_checkpoint, target_checkpoint)
6835 .await
6836 }
6837 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6838 let response = client
6839 .request(proto::GitDiffCheckpoints {
6840 project_id: project_id.0,
6841 repository_id: id.to_proto(),
6842 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
6843 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
6844 })
6845 .await?;
6846 Ok(response.diff)
6847 }
6848 }
6849 })
6850 }
6851
    /// Prunes pending-op entries whose jobs are no longer running, keeping
    /// only paths that still have at least one running op.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree, retaining per path only the still-running ops.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event payload is the *pre-prune* set
            // (`self.pending_ops` has not been reassigned yet) — confirm
            // whether listeners expect the old or the pruned ops here.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6877
    /// Schedules a full reload of this repository's git state on the
    /// worker, keyed with `ReloadGitState` so scans queued behind this one
    /// are coalesced. If `updates_tx` is provided, the freshly computed
    /// snapshot is also forwarded to downstream collaborators.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // If the repository entity was dropped, there is nothing to do.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Snapshots can only be computed against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6909
    /// Spawns the background task that executes git jobs for a local
    /// repository, returning the sender used to enqueue jobs.
    ///
    /// Jobs run one at a time in FIFO order, with one twist: when a job
    /// carries a [`GitJobKey`] and a later job with the same key is
    /// already queued behind it, the earlier job is skipped so only the
    /// most recent keyed job runs (coalescing e.g. repeated reloads).
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for backend initialization; a failure aborts the worker.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so we can look ahead
                // for duplicate keyed jobs before running the next one.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job if a newer job with the same key is
                    // waiting behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: park until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6955
    /// Spawns the background task that forwards git jobs for a remote
    /// repository. Same FIFO-with-key-coalescing behavior as
    /// [`Self::spawn_local_git_worker`], minus the backend initialization
    /// and hosting-provider registration steps.
    ///
    /// NOTE(review): the job loop is duplicated between the local and
    /// remote workers; consider extracting it if it grows further.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain currently queued jobs so we can look ahead for
                // duplicate keyed jobs before running the next one.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job if a newer job with the same key is
                    // waiting behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: park until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6991
6992 fn load_staged_text(
6993 &mut self,
6994 buffer_id: BufferId,
6995 repo_path: RepoPath,
6996 cx: &App,
6997 ) -> Task<Result<Option<String>>> {
6998 let rx = self.send_job(None, move |state, _| async move {
6999 match state {
7000 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7001 anyhow::Ok(backend.load_index_text(repo_path).await)
7002 }
7003 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7004 let response = client
7005 .request(proto::OpenUnstagedDiff {
7006 project_id: project_id.to_proto(),
7007 buffer_id: buffer_id.to_proto(),
7008 })
7009 .await?;
7010 Ok(response.staged_text)
7011 }
7012 }
7013 });
7014 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
7015 }
7016
7017 fn load_committed_text(
7018 &mut self,
7019 buffer_id: BufferId,
7020 repo_path: RepoPath,
7021 cx: &App,
7022 ) -> Task<Result<DiffBasesChange>> {
7023 let rx = self.send_job(None, move |state, _| async move {
7024 match state {
7025 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7026 let committed_text = backend.load_committed_text(repo_path.clone()).await;
7027 let staged_text = backend.load_index_text(repo_path).await;
7028 let diff_bases_change = if committed_text == staged_text {
7029 DiffBasesChange::SetBoth(committed_text)
7030 } else {
7031 DiffBasesChange::SetEach {
7032 index: staged_text,
7033 head: committed_text,
7034 }
7035 };
7036 anyhow::Ok(diff_bases_change)
7037 }
7038 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7039 use proto::open_uncommitted_diff_response::Mode;
7040
7041 let response = client
7042 .request(proto::OpenUncommittedDiff {
7043 project_id: project_id.to_proto(),
7044 buffer_id: buffer_id.to_proto(),
7045 })
7046 .await?;
7047 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
7048 let bases = match mode {
7049 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
7050 Mode::IndexAndHead => DiffBasesChange::SetEach {
7051 head: response.committed_text,
7052 index: response.staged_text,
7053 },
7054 };
7055 Ok(bases)
7056 }
7057 }
7058 });
7059
7060 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
7061 }
7062
7063 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
7064 let repository_id = self.snapshot.id;
7065 let rx = self.send_job(None, move |state, _| async move {
7066 match state {
7067 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7068 backend.load_blob_content(oid).await
7069 }
7070 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
7071 let response = client
7072 .request(proto::GetBlobContent {
7073 project_id: project_id.to_proto(),
7074 repository_id: repository_id.0,
7075 oid: oid.to_string(),
7076 })
7077 .await?;
7078 Ok(response.content)
7079 }
7080 }
7081 });
7082 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
7083 }
7084
    /// Records that the given repo-relative paths may have changed on disk
    /// and schedules a (key-coalesced) job that refreshes their git
    /// statuses and diff stats, emitting events and notifying downstream
    /// collaborators when anything actually changed.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take every batch of changed paths accumulated so far, plus
                // a snapshot to diff the new statuses against.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Deduplicate and sort the changed paths.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        // Fetch statuses and diff stats concurrently; with no
                        // HEAD (e.g. fresh repo) there is nothing to diff against.
                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Upsert entries whose status or diff stat differs from
                        // the previous snapshot. Statuses arrive in path order,
                        // so a single forward-seeking cursor suffices.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                // Unchanged entry — no edit needed.
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Any path left over produced no status entry, i.e. it is
                        // now clean: drop its stale entry if one existed.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                // Apply the computed edits back on the entity and notify.
                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
7197
7198 /// currently running git command and when it started
7199 pub fn current_job(&self) -> Option<JobInfo> {
7200 self.active_jobs.values().next().cloned()
7201 }
7202
7203 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
7204 self.send_job(None, |_, _| async {})
7205 }
7206
    /// Runs `f` while marking each of `paths` as having a pending git
    /// operation of kind `git_status`, so per-path progress can be shown.
    /// When `f` completes, every op this job created is marked `Finished`,
    /// `Skipped` (if the job was canceled), or `Error`.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A canceled job is not an error from the caller's perspective.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                // Record the terminal status on each op this job created.
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7246
7247 fn new_pending_ops_for_paths(
7248 &mut self,
7249 paths: Vec<RepoPath>,
7250 git_status: pending_op::GitStatus,
7251 ) -> Vec<(PendingOpId, RepoPath)> {
7252 let mut edits = Vec::with_capacity(paths.len());
7253 let mut ids = Vec::with_capacity(paths.len());
7254 for path in paths {
7255 let mut ops = self
7256 .pending_ops
7257 .get(&PathKey(path.as_ref().clone()), ())
7258 .cloned()
7259 .unwrap_or_else(|| PendingOps::new(&path));
7260 let id = ops.max_id() + 1;
7261 ops.ops.push(PendingOp {
7262 id,
7263 git_status,
7264 job_status: pending_op::JobStatus::Running,
7265 });
7266 edits.push(sum_tree::Edit::Insert(ops));
7267 ids.push((id, path));
7268 }
7269 self.pending_ops.edit(edits, ());
7270 ids
7271 }
7272 pub fn default_remote_url(&self) -> Option<String> {
7273 self.remote_upstream_url
7274 .clone()
7275 .or(self.remote_origin_url.clone())
7276 }
7277}
7278
7279/// If `path` is a git linked worktree checkout, resolves it to the main
7280/// repository's working directory path. Returns `None` if `path` is a normal
7281/// repository, not a git repo, or if resolution fails.
7282///
7283/// Resolution works by:
7284/// 1. Reading the `.git` file to get the `gitdir:` pointer
7285/// 2. Following that to the worktree-specific git directory
7286/// 3. Reading the `commondir` file to find the shared `.git` directory
7287/// 4. Deriving the main repo's working directory from the common dir
7288pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7289 let dot_git = path.join(".git");
7290 let metadata = fs.metadata(&dot_git).await.ok()??;
7291 if metadata.is_dir {
7292 return None; // Normal repo, not a linked worktree
7293 }
7294 // It's a .git file — parse the gitdir: pointer
7295 let content = fs.load(&dot_git).await.ok()?;
7296 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7297 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7298 // Read commondir to find the main .git directory
7299 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7300 let common_dir = fs
7301 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7302 .await
7303 .ok()?;
7304 git::repository::original_repo_path_from_common_dir(&common_dir)
7305}
7306
7307/// Validates that the resolved worktree directory is acceptable:
7308/// - The setting must not be an absolute path.
7309/// - The resolved path must be either a subdirectory of the working
7310/// directory or a subdirectory of its parent (i.e., a sibling).
7311///
7312/// Returns `Ok(resolved_path)` or an error with a user-facing message.
7313pub fn worktrees_directory_for_repo(
7314 original_repo_abs_path: &Path,
7315 worktree_directory_setting: &str,
7316) -> Result<PathBuf> {
7317 // Check the original setting before trimming, since a path like "///"
7318 // is absolute but becomes "" after stripping trailing separators.
7319 // Also check for leading `/` or `\` explicitly, because on Windows
7320 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
7321 // would slip through even though it's clearly not a relative path.
7322 if Path::new(worktree_directory_setting).is_absolute()
7323 || worktree_directory_setting.starts_with('/')
7324 || worktree_directory_setting.starts_with('\\')
7325 {
7326 anyhow::bail!(
7327 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
7328 );
7329 }
7330
7331 if worktree_directory_setting.is_empty() {
7332 anyhow::bail!("git.worktree_directory must not be empty");
7333 }
7334
7335 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
7336 if trimmed == ".." {
7337 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
7338 }
7339
7340 let joined = original_repo_abs_path.join(trimmed);
7341 let resolved = util::normalize_path(&joined);
7342 let resolved = if resolved.starts_with(original_repo_abs_path) {
7343 resolved
7344 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
7345 resolved.join(repo_dir_name)
7346 } else {
7347 resolved
7348 };
7349
7350 let parent = original_repo_abs_path
7351 .parent()
7352 .unwrap_or(original_repo_abs_path);
7353
7354 if !resolved.starts_with(parent) {
7355 anyhow::bail!(
7356 "git.worktree_directory resolved to {resolved:?}, which is outside \
7357 the project root and its parent directory. It must resolve to a \
7358 subdirectory of {original_repo_abs_path:?} or a sibling of it."
7359 );
7360 }
7361
7362 Ok(resolved)
7363}
7364
7365/// Returns a short name for a linked worktree suitable for UI display
7366///
7367/// Uses the main worktree path to come up with a short name that disambiguates
7368/// the linked worktree from the main worktree.
7369pub fn linked_worktree_short_name(
7370 main_worktree_path: &Path,
7371 linked_worktree_path: &Path,
7372) -> Option<SharedString> {
7373 if main_worktree_path == linked_worktree_path {
7374 return None;
7375 }
7376
7377 let project_name = main_worktree_path.file_name()?.to_str()?;
7378 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7379 let name = if directory_name != project_name {
7380 directory_name.to_string()
7381 } else {
7382 linked_worktree_path
7383 .parent()?
7384 .file_name()?
7385 .to_str()?
7386 .to_string()
7387 };
7388 Some(name.into())
7389}
7390
/// Builds a permalink for a file inside a crate downloaded into the cargo
/// registry `src` cache: locates the crate's `.cargo_vcs_info.json`
/// (written by `cargo package`) and `Cargo.toml` to recover the upstream
/// repository URL, the commit sha, and the file's path within that
/// repository, then asks the matching hosting provider to build the URL.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal projections of the JSON/TOML files read below.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file looking for the crate root that contains
    // .cargo_vcs_info.json.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file path under the crate's location within the VCS tree.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7441
/// Converts an optional [`Blame`] into its protobuf response form.
///
/// Note the asymmetric field mapping: internal `committer_name` /
/// `committer_email` become proto `committer` / `committer_mail`.
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    // `None` means blame was unavailable; encode as an absent payload.
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author,
            author_mail: entry.author_mail,
            author_time: entry.author_time,
            author_tz: entry.author_tz,
            committer: entry.committer_name,
            committer_mail: entry.committer_email,
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz,
            summary: entry.summary,
            previous: entry.previous,
            filename: entry.filename,
        })
        .collect::<Vec<_>>();

    // Full commit messages travel separately, keyed by commit oid.
    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
    }
}
7484
7485fn deserialize_blame_buffer_response(
7486 response: proto::BlameBufferResponse,
7487) -> Option<git::blame::Blame> {
7488 let response = response.blame_response?;
7489 let entries = response
7490 .entries
7491 .into_iter()
7492 .filter_map(|entry| {
7493 Some(git::blame::BlameEntry {
7494 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7495 range: entry.start_line..entry.end_line,
7496 original_line_number: entry.original_line_number,
7497 committer_name: entry.committer,
7498 committer_time: entry.committer_time,
7499 committer_tz: entry.committer_tz,
7500 committer_email: entry.committer_mail,
7501 author: entry.author,
7502 author_mail: entry.author_mail,
7503 author_time: entry.author_time,
7504 author_tz: entry.author_tz,
7505 summary: entry.summary,
7506 previous: entry.previous,
7507 filename: entry.filename,
7508 })
7509 })
7510 .collect::<Vec<_>>();
7511
7512 let messages = response
7513 .messages
7514 .into_iter()
7515 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7516 .collect::<HashMap<_, _>>();
7517
7518 Some(Blame { entries, messages })
7519}
7520
7521fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7522 proto::Branch {
7523 is_head: branch.is_head,
7524 ref_name: branch.ref_name.to_string(),
7525 unix_timestamp: branch
7526 .most_recent_commit
7527 .as_ref()
7528 .map(|commit| commit.commit_timestamp as u64),
7529 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7530 ref_name: upstream.ref_name.to_string(),
7531 tracking: upstream
7532 .tracking
7533 .status()
7534 .map(|upstream| proto::UpstreamTracking {
7535 ahead: upstream.ahead as u64,
7536 behind: upstream.behind as u64,
7537 }),
7538 }),
7539 most_recent_commit: branch
7540 .most_recent_commit
7541 .as_ref()
7542 .map(|commit| proto::CommitSummary {
7543 sha: commit.sha.to_string(),
7544 subject: commit.subject.to_string(),
7545 commit_timestamp: commit.commit_timestamp,
7546 author_name: commit.author_name.to_string(),
7547 }),
7548 }
7549}
7550
7551fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7552 proto::Worktree {
7553 path: worktree.path.to_string_lossy().to_string(),
7554 ref_name: worktree
7555 .ref_name
7556 .as_ref()
7557 .map(|s| s.to_string())
7558 .unwrap_or_default(),
7559 sha: worktree.sha.to_string(),
7560 is_main: worktree.is_main,
7561 is_bare: worktree.is_bare,
7562 }
7563}
7564
7565fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7566 git::repository::Worktree {
7567 path: PathBuf::from(proto.path.clone()),
7568 ref_name: if proto.ref_name.is_empty() {
7569 None
7570 } else {
7571 Some(SharedString::from(&proto.ref_name))
7572 },
7573 sha: proto.sha.clone().into(),
7574 is_main: proto.is_main,
7575 is_bare: proto.is_bare,
7576 }
7577}
7578
7579fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7580 git::repository::Branch {
7581 is_head: proto.is_head,
7582 ref_name: proto.ref_name.clone().into(),
7583 upstream: proto
7584 .upstream
7585 .as_ref()
7586 .map(|upstream| git::repository::Upstream {
7587 ref_name: upstream.ref_name.to_string().into(),
7588 tracking: upstream
7589 .tracking
7590 .as_ref()
7591 .map(|tracking| {
7592 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7593 ahead: tracking.ahead as u32,
7594 behind: tracking.behind as u32,
7595 })
7596 })
7597 .unwrap_or(git::repository::UpstreamTracking::Gone),
7598 }),
7599 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7600 git::repository::CommitSummary {
7601 sha: commit.sha.to_string().into(),
7602 subject: commit.subject.to_string().into(),
7603 commit_timestamp: commit.commit_timestamp,
7604 author_name: commit.author_name.to_string().into(),
7605 has_parent: true,
7606 }
7607 }),
7608 }
7609}
7610
7611fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7612 proto::GitCommitDetails {
7613 sha: commit.sha.to_string(),
7614 message: commit.message.to_string(),
7615 commit_timestamp: commit.commit_timestamp,
7616 author_email: commit.author_email.to_string(),
7617 author_name: commit.author_name.to_string(),
7618 }
7619}
7620
7621fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7622 CommitDetails {
7623 sha: proto.sha.clone().into(),
7624 message: proto.message.clone().into(),
7625 commit_timestamp: proto.commit_timestamp,
7626 author_email: proto.author_email.clone().into(),
7627 author_name: proto.author_name.clone().into(),
7628 }
7629}
7630
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Capture current state on the foreground thread. Any paths queued for
    // an incremental status refresh are superseded by this full scan.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve the head commit details only when a head SHA exists (a
    // repository with no commits has none). A failed `show` is logged and
    // treated as "no head commit" rather than failing the snapshot.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Phase 1 (background): branches, head commit, and git worktrees,
    // fetched concurrently; the first error aborts the snapshot.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // Only worktrees other than this repository's own working directory
    // are tracked as "linked" worktrees.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    // Remote URLs use `join` rather than `try_join`: a missing remote is
    // not an error here, it just yields no URL.
    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // Publish the head/branch/remote/worktree portion of the snapshot right
    // away (bumping `scan_id`), emitting change events only for the parts
    // that actually differ, so the UI reacts before the slower file-state
    // computation below completes.
    let snapshot = this.update(cx, |this, cx| {
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Phase 2 (background): file statuses (for the whole repo, via the "."
    // path), diff stats, and stash entries. Diff stats are skipped — replaced
    // with an empty result — when there is no head commit to diff against.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Join each status entry with its diff stat (if any), and record which
    // paths are conflicted for the merge bookkeeping below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Refresh merge/conflict details on the background thread; `update`
    // reports whether the set of conflicted paths changed.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Publish the file-state portion of the snapshot, bumping `scan_id` a
    // second time so consumers see this as a distinct update from phase 1.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7800
7801fn status_from_proto(
7802 simple_status: i32,
7803 status: Option<proto::GitFileStatus>,
7804) -> anyhow::Result<FileStatus> {
7805 use proto::git_file_status::Variant;
7806
7807 let Some(variant) = status.and_then(|status| status.variant) else {
7808 let code = proto::GitStatus::from_i32(simple_status)
7809 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7810 let result = match code {
7811 proto::GitStatus::Added => TrackedStatus {
7812 worktree_status: StatusCode::Added,
7813 index_status: StatusCode::Unmodified,
7814 }
7815 .into(),
7816 proto::GitStatus::Modified => TrackedStatus {
7817 worktree_status: StatusCode::Modified,
7818 index_status: StatusCode::Unmodified,
7819 }
7820 .into(),
7821 proto::GitStatus::Conflict => UnmergedStatus {
7822 first_head: UnmergedStatusCode::Updated,
7823 second_head: UnmergedStatusCode::Updated,
7824 }
7825 .into(),
7826 proto::GitStatus::Deleted => TrackedStatus {
7827 worktree_status: StatusCode::Deleted,
7828 index_status: StatusCode::Unmodified,
7829 }
7830 .into(),
7831 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7832 };
7833 return Ok(result);
7834 };
7835
7836 let result = match variant {
7837 Variant::Untracked(_) => FileStatus::Untracked,
7838 Variant::Ignored(_) => FileStatus::Ignored,
7839 Variant::Unmerged(unmerged) => {
7840 let [first_head, second_head] =
7841 [unmerged.first_head, unmerged.second_head].map(|head| {
7842 let code = proto::GitStatus::from_i32(head)
7843 .with_context(|| format!("Invalid git status code: {head}"))?;
7844 let result = match code {
7845 proto::GitStatus::Added => UnmergedStatusCode::Added,
7846 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7847 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7848 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7849 };
7850 Ok(result)
7851 });
7852 let [first_head, second_head] = [first_head?, second_head?];
7853 UnmergedStatus {
7854 first_head,
7855 second_head,
7856 }
7857 .into()
7858 }
7859 Variant::Tracked(tracked) => {
7860 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7861 .map(|status| {
7862 let code = proto::GitStatus::from_i32(status)
7863 .with_context(|| format!("Invalid git status code: {status}"))?;
7864 let result = match code {
7865 proto::GitStatus::Modified => StatusCode::Modified,
7866 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7867 proto::GitStatus::Added => StatusCode::Added,
7868 proto::GitStatus::Deleted => StatusCode::Deleted,
7869 proto::GitStatus::Renamed => StatusCode::Renamed,
7870 proto::GitStatus::Copied => StatusCode::Copied,
7871 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7872 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7873 };
7874 Ok(result)
7875 });
7876 let [index_status, worktree_status] = [index_status?, worktree_status?];
7877 TrackedStatus {
7878 index_status,
7879 worktree_status,
7880 }
7881 .into()
7882 }
7883 };
7884 Ok(result)
7885}
7886
7887fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7888 use proto::git_file_status::{Tracked, Unmerged, Variant};
7889
7890 let variant = match status {
7891 FileStatus::Untracked => Variant::Untracked(Default::default()),
7892 FileStatus::Ignored => Variant::Ignored(Default::default()),
7893 FileStatus::Unmerged(UnmergedStatus {
7894 first_head,
7895 second_head,
7896 }) => Variant::Unmerged(Unmerged {
7897 first_head: unmerged_status_to_proto(first_head),
7898 second_head: unmerged_status_to_proto(second_head),
7899 }),
7900 FileStatus::Tracked(TrackedStatus {
7901 index_status,
7902 worktree_status,
7903 }) => Variant::Tracked(Tracked {
7904 index_status: tracked_status_to_proto(index_status),
7905 worktree_status: tracked_status_to_proto(worktree_status),
7906 }),
7907 };
7908 proto::GitFileStatus {
7909 variant: Some(variant),
7910 }
7911}
7912
7913fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7914 match code {
7915 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7916 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7917 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7918 }
7919}
7920
7921fn tracked_status_to_proto(code: StatusCode) -> i32 {
7922 match code {
7923 StatusCode::Added => proto::GitStatus::Added as _,
7924 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7925 StatusCode::Modified => proto::GitStatus::Modified as _,
7926 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7927 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7928 StatusCode::Copied => proto::GitStatus::Copied as _,
7929 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7930 }
7931}