1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, CreateWorktreeTarget,
36 DiffType, FetchOptions, GitRepository, GitRepositoryCheckpoint, GraphCommitData,
37 InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote, RemoteCommandOutput,
38 RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store of git state for a project: tracks the repositories
/// discovered in the project's worktrees, per-buffer diff/conflict state,
/// and (when shared) replication of repository snapshots downstream.
pub struct GitStore {
    // Local vs. remote mode, plus any downstream (shared) connection.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Worktrees associated with each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    // The repository currently considered "active", if any.
    active_repo_id: Option<RepositoryId>,
    // In-flight diff-loading tasks, deduplicated per (buffer, diff kind) so
    // concurrent callers share one load.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (diffs, conflict sets, base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diffs kept alive on behalf of remote peers, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Strong handles to diffs opened on behalf of a remote peer, keeping them
/// alive for as long as the peer needs them.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: weak handles to the buffer's diffs and conflict
/// set, cached base texts, and bookkeeping for in-flight recalculations.
struct BufferGitState {
    // Weak so that dropping the last strong handle releases the diff.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against arbitrary commits; `None` key means "no base" diff.
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Waiters notified when a conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    // Cached base texts for OID diffs, keyed by commit.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or head) changed, and their
/// new contents (`None` meaning the file is absent from that base).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    SetIndex(Option<String>),
    SetHead(Option<String>),
    // Index and head changed to different contents.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    // Index and head changed to the same contents.
    SetBoth(Option<String>),
}
155
/// The kind of diff being loaded for a buffer; part of the key that
/// deduplicates concurrent diff loads.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    Unstaged,
    Uncommitted,
    // Diff against an arbitrary commit (`None` = no base).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store owns the repositories locally or mirrors them from an
/// upstream project, plus any downstream connection it replicates to.
enum GitStoreState {
    Local {
        // Source of fresh `RepositoryId`s for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// A replication message queued for delivery to downstream clients.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
181
/// Connection state for replicating a local store to a downstream client;
/// `_task` drains `updates_tx` and forwards diffs of snapshots over `client`.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// A point-in-time checkpoint across all repositories in the store, keyed by
/// each repository's working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// The git status (and optional diff statistics) of a single path within a
/// repository, as stored in the repository's status tree.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
// Allows status entries to live in a `SumTree` keyed/summarized by path,
// aggregating per-file statuses into a `GitSummary`.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}
254
// Entries are keyed by their repository-relative path for tree edits/lookups.
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Newtype identifier for a repository tracked by the [`GitStore`].
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
265
/// Details about an in-progress merge: which merge heads conflict on which
/// paths, and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}
271
/// Cache state for a single commit's graph data: either still being fetched
/// or fully loaded.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// An immutable snapshot of a repository's observable state (statuses,
/// branches, head, merge/stash info). Cloneable and diffable for
/// replication to downstream clients.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    // Statuses of all non-clean paths, ordered by path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    // Incremented on every status scan; used to order updates.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    // Linked git worktrees of this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
299
/// Identifier for a git job running on a repository's job queue.
type JobId = u64;
301
/// Metadata about a running git job, for progress display.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
307
/// Background handler that services requests for per-commit graph data;
/// requests are sent over `commit_data_request` and processed by `_task`.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}
312
/// Lifecycle of the commit-data handler: not yet spawned, running, or shut down.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
318
/// Incrementally fetched commit-graph data for one (source, order) log view,
/// including an index from commit OID to its position in `commit_data`.
pub struct InitialGitGraphData {
    // Keeps the fetch alive; dropped to cancel it.
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
325
/// Borrowed view of the currently loaded commit-graph data, along with
/// whether loading is still in progress and any error encountered.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
331
/// A single git repository tracked by the [`GitStore`]: the latest snapshot
/// of its state plus the machinery for running git jobs against it.
/// Dereferences to its [`RepositorySnapshot`].
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Queue of serialized git jobs for this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Next job id to hand out.
    job_id: JobId,
    // Askpass delegates for in-flight credential prompts, keyed by prompt id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily resolved backend (local git or remote proxy), shared by jobs.
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
351
// Convenience: read snapshot fields directly through `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
359
/// Backend state for a repository on the local machine: the filesystem, the
/// opened git backend, and the environment used to run git commands.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    pub environment: Arc<HashMap<String, String>>,
}
366
impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`, resolving the directory
    /// environment first so that the `git` binary is looked up on the
    /// project's `PATH` (falling back to the global `PATH`).
    ///
    /// Failing to resolve the environment is non-fatal: it is logged and an
    /// empty environment is used instead.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        // Opening the repo can hit the filesystem, so do it off the main thread.
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
409
/// Backend state for a repository hosted by an upstream project; operations
/// are proxied over `client`.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
415
/// Resolved backend for a repository: either direct local access or a proxy
/// to an upstream project.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
421
/// Progress events for loading the commit graph of a log view.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
428
/// Events emitted by a [`Repository`] when some aspect of its state changes.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    // Carries the new set of pending operations.
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    // Graph-loading progress for a specific (source, order) log view.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
439
/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
442
/// Store-level events emitted by [`GitStore`].
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
454
// Event wiring: repositories emit repository- and job-level events, while
// the store emits store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
458
/// A unit of work queued on a repository's job channel. The optional `key`
/// lets equivalent jobs be coalesced/deduplicated.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    key: Option<GitJobKey>,
}
463
/// Keys identifying classes of git jobs so duplicates can be coalesced.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
471
472impl GitStore {
    /// Creates a store that owns its repositories locally, discovering them
    /// from `worktree_store` and running git via `fs`.
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                // Repository ids start at 1.
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }
492
    /// Creates a store that mirrors repositories from an upstream project
    /// over `upstream_client`.
    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: u64,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                downstream: None,
            },
            cx,
        )
    }
511
    /// Shared constructor: wires up subscriptions to the worktree and buffer
    /// stores (and, when present, the global trusted-worktrees registry) and
    /// initializes empty collections.
    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        let mut _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
            _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
        }

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            worktree_ids: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }
540
    /// Registers all RPC handlers that this store services for remote
    /// clients (git operations, diffs, branches, worktrees, etc.).
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
    }
593
    /// Returns true when this store owns its repositories locally.
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
597
    /// Makes `repo_id` the active repository, emitting a change event only
    /// when it actually changes.
    fn set_active_repo_id(&mut self, repo_id: RepositoryId, cx: &mut Context<Self>) {
        if self.active_repo_id != Some(repo_id) {
            self.active_repo_id = Some(repo_id);
            cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
        }
    }
604
    /// Activates the repository containing `project_path`, if one exists;
    /// otherwise leaves the active repository unchanged.
    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
        if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
            self.set_active_repo_id(repo.read(cx).id, cx);
        }
    }
610
    /// Activates the repository whose working directory contains (or equals)
    /// the given worktree's root. When several repositories qualify (nested
    /// repos), the one with the longest working-directory path — i.e. the
    /// innermost — wins. No-op if the worktree or a matching repo is missing.
    pub fn set_active_repo_for_worktree(
        &mut self,
        worktree_id: WorktreeId,
        cx: &mut Context<Self>,
    ) {
        let Some(worktree) = self
            .worktree_store
            .read(cx)
            .worktree_for_id(worktree_id, cx)
        else {
            return;
        };
        let worktree_abs_path = worktree.read(cx).abs_path();
        let Some(repo_id) = self
            .repositories
            .values()
            .filter(|repo| {
                let repo_path = &repo.read(cx).work_directory_abs_path;
                *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref())
            })
            .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
            .map(|repo| repo.read(cx).id)
        else {
            return;
        };

        self.set_active_repo_id(repo_id, cx);
    }
639
    /// Begins sharing this store with a downstream `client`.
    ///
    /// Remote stores simply forward each repository's initial snapshot and
    /// remember the client. Local stores additionally spawn a background
    /// task that drains queued [`DownstreamUpdate`]s, sending incremental
    /// diffs against the last snapshot seen per repository; when that task
    /// ends (e.g. the client disconnects), the downstream state is cleared.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the queue with the current snapshot of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send only the diff.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send the full initial state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The replication loop ended; drop the downstream state.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
720
    /// Stops sharing: drops any downstream connection (which also cancels
    /// the local replication task) and forgets diffs held for peers.
    pub fn unshared(&mut self, _cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                downstream_client.take();
            }
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                downstream_client.take();
            }
        }
        self.shared_diffs.clear();
    }
738
    /// Releases the diffs kept alive on behalf of a single peer.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
742
743 pub fn active_repository(&self) -> Option<Entity<Repository>> {
744 self.active_repo_id
745 .as_ref()
746 .map(|id| self.repositories[id].clone())
747 }
748
    /// Returns (opening if necessary) the buffer's diff of working-copy text
    /// against the index. Concurrent calls for the same buffer share one
    /// load via `loading_diffs`; if a recalculation is already in flight the
    /// returned task waits for it before resolving.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: the diff already exists and is still alive.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Slow path: load the staged text and build the diff, deduplicating
        // concurrent requests through the shared task.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
803
    /// Returns (opening if necessary) a diff of the buffer against the blob
    /// at commit `oid` (`None` = empty base). The loaded base text is cached
    /// in the buffer's git state, the unstaged diff is attached as the
    /// secondary diff, and concurrent requests are deduplicated via
    /// `loading_diffs`.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: an OID diff for this buffer already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load is already in flight: join it.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository, if a commit was given.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff serves as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Register the diff and cache the base text.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
904
    /// Returns (opening if necessary) the buffer's diff of working-copy text
    /// against HEAD. Mirrors [`Self::open_unstaged_diff`], but loads the
    /// committed text instead of the staged text.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff already exists and is still alive.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
957
    /// Shared tail of unstaged/uncommitted diff opening: registers a new
    /// [`BufferDiff`] in the buffer's git state, applies the loaded base
    /// texts, and waits for the initial recalculation to finish.
    ///
    /// On error, the pending entry in `loading_diffs` is removed before the
    /// error is propagated. Not used for `DiffKind::SinceOid`.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Loading the base text failed; clear the in-flight marker so
                // a later call can retry.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                // Resolve once the initial recalculation (if any) completes.
                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1032
1033 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1034 let diff_state = self.diffs.get(&buffer_id)?;
1035 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1036 }
1037
1038 pub fn get_uncommitted_diff(
1039 &self,
1040 buffer_id: BufferId,
1041 cx: &App,
1042 ) -> Option<Entity<BufferDiff>> {
1043 let diff_state = self.diffs.get(&buffer_id)?;
1044 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1045 }
1046
1047 pub fn get_diff_since_oid(
1048 &self,
1049 buffer_id: BufferId,
1050 oid: Option<git::Oid>,
1051 cx: &App,
1052 ) -> Option<Entity<BufferDiff>> {
1053 let diff_state = self.diffs.get(&buffer_id)?;
1054 diff_state.read(cx).oid_diff(oid)
1055 }
1056
1057 pub fn open_conflict_set(
1058 &mut self,
1059 buffer: Entity<Buffer>,
1060 cx: &mut Context<Self>,
1061 ) -> Entity<ConflictSet> {
1062 log::debug!("open conflict set");
1063 let buffer_id = buffer.read(cx).remote_id();
1064
1065 if let Some(git_state) = self.diffs.get(&buffer_id)
1066 && let Some(conflict_set) = git_state
1067 .read(cx)
1068 .conflict_set
1069 .as_ref()
1070 .and_then(|weak| weak.upgrade())
1071 {
1072 let conflict_set = conflict_set;
1073 let buffer_snapshot = buffer.read(cx).text_snapshot();
1074
1075 git_state.update(cx, |state, cx| {
1076 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1077 });
1078
1079 return conflict_set;
1080 }
1081
1082 let is_unmerged = self
1083 .repository_and_path_for_buffer_id(buffer_id, cx)
1084 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1085 let git_store = cx.weak_entity();
1086 let buffer_git_state = self
1087 .diffs
1088 .entry(buffer_id)
1089 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1090 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1091
1092 self._subscriptions
1093 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1094 cx.emit(GitStoreEvent::ConflictsUpdated);
1095 }));
1096
1097 buffer_git_state.update(cx, |state, cx| {
1098 state.conflict_set = Some(conflict_set.downgrade());
1099 let buffer_snapshot = buffer.read(cx).text_snapshot();
1100 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1101 });
1102
1103 conflict_set
1104 }
1105
1106 pub fn project_path_git_status(
1107 &self,
1108 project_path: &ProjectPath,
1109 cx: &App,
1110 ) -> Option<FileStatus> {
1111 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1112 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1113 }
1114
    /// Takes a checkpoint of every repository in the store.
    ///
    /// Returns a [`GitStoreCheckpoint`] mapping each repository's working
    /// directory to its individual checkpoint; fails if any repository's
    /// checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // The `?` inside `map` flattens the nested result produced by
                // awaiting `checkpoint()`, so `try_join_all` below sees a
                // single error type.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        // Await all checkpoints off the main thread; the paths and checkpoint
        // futures were pushed in lockstep, so `zip` re-associates them.
        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1135
1136 pub fn restore_checkpoint(
1137 &self,
1138 checkpoint: GitStoreCheckpoint,
1139 cx: &mut App,
1140 ) -> Task<Result<()>> {
1141 let repositories_by_work_dir_abs_path = self
1142 .repositories
1143 .values()
1144 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1145 .collect::<HashMap<_, _>>();
1146
1147 let mut tasks = Vec::new();
1148 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1149 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1150 let restore = repository.update(cx, |repository, _| {
1151 repository.restore_checkpoint(checkpoint)
1152 });
1153 tasks.push(async move { restore.await? });
1154 }
1155 }
1156 cx.background_spawn(async move {
1157 future::try_join_all(tasks).await?;
1158 Ok(())
1159 })
1160 }
1161
1162 /// Compares two checkpoints, returning true if they are equal.
1163 pub fn compare_checkpoints(
1164 &self,
1165 left: GitStoreCheckpoint,
1166 mut right: GitStoreCheckpoint,
1167 cx: &mut App,
1168 ) -> Task<Result<bool>> {
1169 let repositories_by_work_dir_abs_path = self
1170 .repositories
1171 .values()
1172 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1173 .collect::<HashMap<_, _>>();
1174
1175 let mut tasks = Vec::new();
1176 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1177 if let Some(right_checkpoint) = right
1178 .checkpoints_by_work_dir_abs_path
1179 .remove(&work_dir_abs_path)
1180 {
1181 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1182 {
1183 let compare = repository.update(cx, |repository, _| {
1184 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1185 });
1186
1187 tasks.push(async move { compare.await? });
1188 }
1189 } else {
1190 return Task::ready(Ok(false));
1191 }
1192 }
1193 cx.background_spawn(async move {
1194 Ok(future::try_join_all(tasks)
1195 .await?
1196 .into_iter()
1197 .all(|result| result))
1198 })
1199 }
1200
    /// Blames a buffer.
    ///
    /// When `version` is `None`, the buffer's current text is blamed;
    /// otherwise the text as of that version is used. Fails if the buffer
    /// does not belong to any tracked Git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Capture everything we need from the buffer before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle so the spawned task doesn't keep the
        // repository entity alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                // Local repository: run the blame directly against the backend.
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                // Remote project: ask the host to compute the blame over RPC.
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1247
1248 pub fn file_history(
1249 &self,
1250 repo: &Entity<Repository>,
1251 path: RepoPath,
1252 cx: &mut App,
1253 ) -> Task<Result<git::repository::FileHistory>> {
1254 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1255
1256 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1257 }
1258
1259 pub fn file_history_paginated(
1260 &self,
1261 repo: &Entity<Repository>,
1262 path: RepoPath,
1263 skip: usize,
1264 limit: Option<usize>,
1265 cx: &mut App,
1266 ) -> Task<Result<git::repository::FileHistory>> {
1267 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1268
1269 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1270 }
1271
    /// Builds a permalink URL to `selection` (a line range) of `buffer` on
    /// its Git hosting provider.
    ///
    /// Falls back to crate metadata for Rust sources in the Cargo registry
    /// when the buffer is not inside a tracked Git repository.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                // NOTE(review): if `cx.update` is fallible in this gpui
                // version, its result should be unwrapped with `?` here —
                // confirm against the gpui API in use.
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the branch's configured upstream remote; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Host side: resolve the remote URL and HEAD SHA, then let
                    // the matching hosting provider construct the permalink.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Guest side: ask the host to build the permalink for us.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1356
1357 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1358 match &self.state {
1359 GitStoreState::Local {
1360 downstream: downstream_client,
1361 ..
1362 } => downstream_client
1363 .as_ref()
1364 .map(|state| (state.client.clone(), state.project_id)),
1365 GitStoreState::Remote {
1366 downstream: downstream_client,
1367 ..
1368 } => downstream_client.clone(),
1369 }
1370 }
1371
1372 fn upstream_client(&self) -> Option<AnyProtoClient> {
1373 match &self.state {
1374 GitStoreState::Local { .. } => None,
1375 GitStoreState::Remote {
1376 upstream_client, ..
1377 } => Some(upstream_client.clone()),
1378 }
1379 }
1380
    /// Reacts to worktree-store events on the local (host) side.
    ///
    /// Remote stores return immediately: their repositories are driven by
    /// `UpdateRepository`/`RemoveRepository` messages instead of local scans.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Asynchronously group the changed paths by the repository
                    // they belong to, then notify each repository.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            // NOTE(review): the result of this fallible async
                            // `update` is discarded, so a dropped repository
                            // entity is silently ignored — confirm intentional.
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees (e.g. internal ones) don't contribute
                // repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Unlink the removed worktree from every repository; collect
                // repositories that end up with no worktrees at all.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                // Drop orphaned repositories and tell downstream peers.
                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining one (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Responds to a repository update by refreshing conflict tracking for
    /// every open buffer belonging to that repository, then re-emitting the
    /// event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only buffers that resolve to this same repository are affected.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only re-parse conflict markers when the unmerged
                        // status actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1526
    /// Forwards a repository's `JobsUpdated` notification as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1530
    /// Updates our list of repositories and schedules git scans in response
    /// to a notification from a worktree.
    ///
    /// Repositories matching an update (by old or new working directory) are
    /// re-linked and re-scanned, or unlinked from the worktree when their
    /// working directory went away; brand-new working directories get a fresh
    /// local [`Repository`]. Repositories left with no associated worktrees
    /// are removed at the end.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match an existing repository by either its old or new work directory.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still exists (possibly moved): re-link it to
                    // this worktree and schedule a re-scan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Work directory is gone for this worktree: drop the link,
                    // and mark the repository for removal if nothing else
                    // references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A repository we haven't seen before: create a local entity.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                // NOTE(review): `Release` ordering on a plain id counter looks
                // stronger than necessary (`Relaxed` would typically suffice)
                // — confirm whether anything synchronizes on this.
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository ever added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        // Tear down repositories that no longer have any worktree.
        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1643
1644 fn on_trusted_worktrees_event(
1645 &mut self,
1646 _: Entity<TrustedWorktreesStore>,
1647 event: &TrustedWorktreesEvent,
1648 cx: &mut Context<Self>,
1649 ) {
1650 if !matches!(self.state, GitStoreState::Local { .. }) {
1651 return;
1652 }
1653
1654 let (is_trusted, event_paths) = match event {
1655 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1656 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1657 };
1658
1659 for (repo_id, worktree_ids) in &self.worktree_ids {
1660 if worktree_ids
1661 .iter()
1662 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1663 {
1664 if let Some(repo) = self.repositories.get(repo_id) {
1665 let repository_state = repo.read(cx).repository_state.clone();
1666 cx.background_spawn(async move {
1667 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1668 state.backend.set_trusted(is_trusted);
1669 }
1670 })
1671 .detach();
1672 }
1673 }
1674 }
1675 }
1676
    /// Keeps per-buffer git state in sync with buffer-store lifecycle events.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch for language changes so the buffer's diff state can
                // react (e.g. re-highlight base text).
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop all diff state (local and shared) for the dead buffer.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Errors (e.g. the repo going away mid-flight) are
                        // logged rather than propagated.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1749
    /// Kicks off diff recalculation and conflict-marker re-parsing for each
    /// of the given buffers, returning a future that resolves once all of
    /// the resulting work has completed. Buffers without diff state are
    /// skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                // Shadow `buffer` with its text snapshot; only the text is
                // needed from here on.
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` returns `None` when nothing is
                    // pending, hence `extend`.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1775
    /// Reacts to hunks being staged or unstaged in a diff by writing the new
    /// index text back to the buffer's repository.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the per-buffer operation counter so stale index writes
                // can be detected downstream.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // On failure, roll back the optimistic pending hunks
                        // and surface the error as a store event.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1815
1816 fn local_worktree_git_repos_changed(
1817 &mut self,
1818 worktree: Entity<Worktree>,
1819 changed_repos: &UpdatedGitRepositoriesSet,
1820 cx: &mut Context<Self>,
1821 ) {
1822 log::debug!("local worktree repos changed");
1823 debug_assert!(worktree.read(cx).is_local());
1824
1825 for repository in self.repositories.values() {
1826 repository.update(cx, |repository, cx| {
1827 let repo_abs_path = &repository.work_directory_abs_path;
1828 if changed_repos.iter().any(|update| {
1829 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1830 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1831 }) {
1832 repository.reload_buffer_diff_bases(cx);
1833 }
1834 });
1835 }
1836 }
1837
    /// All repositories tracked by this store, keyed by their id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1841
1842 /// Returns the original (main) repository working directory for the given worktree.
1843 /// For normal checkouts this equals the worktree's own path; for linked
1844 /// worktrees it points back to the original repo.
1845 pub fn original_repo_path_for_worktree(
1846 &self,
1847 worktree_id: WorktreeId,
1848 cx: &App,
1849 ) -> Option<Arc<Path>> {
1850 self.active_repo_id
1851 .iter()
1852 .chain(self.worktree_ids.keys())
1853 .find(|repo_id| {
1854 self.worktree_ids
1855 .get(repo_id)
1856 .is_some_and(|ids| ids.contains(&worktree_id))
1857 })
1858 .and_then(|repo_id| self.repositories.get(repo_id))
1859 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1860 }
1861
1862 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1863 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1864 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1865 Some(status.status)
1866 }
1867
1868 pub fn repository_and_path_for_buffer_id(
1869 &self,
1870 buffer_id: BufferId,
1871 cx: &App,
1872 ) -> Option<(Entity<Repository>, RepoPath)> {
1873 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1874 let project_path = buffer.read(cx).project_path(cx)?;
1875 self.repository_and_path_for_project_path(&project_path, cx)
1876 }
1877
1878 pub fn repository_and_path_for_project_path(
1879 &self,
1880 path: &ProjectPath,
1881 cx: &App,
1882 ) -> Option<(Entity<Repository>, RepoPath)> {
1883 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1884 self.repositories
1885 .values()
1886 .filter_map(|repo| {
1887 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1888 Some((repo.clone(), repo_path))
1889 })
1890 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1891 }
1892
1893 pub fn git_init(
1894 &self,
1895 path: Arc<Path>,
1896 fallback_branch_name: String,
1897 cx: &App,
1898 ) -> Task<Result<()>> {
1899 match &self.state {
1900 GitStoreState::Local { fs, .. } => {
1901 let fs = fs.clone();
1902 cx.background_executor()
1903 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1904 }
1905 GitStoreState::Remote {
1906 upstream_client,
1907 upstream_project_id: project_id,
1908 ..
1909 } => {
1910 let client = upstream_client.clone();
1911 let project_id = *project_id;
1912 cx.background_executor().spawn(async move {
1913 client
1914 .request(proto::GitInit {
1915 project_id: project_id,
1916 abs_path: path.to_string_lossy().into_owned(),
1917 fallback_branch_name,
1918 })
1919 .await?;
1920 Ok(())
1921 })
1922 }
1923 }
1924 }
1925
1926 pub fn git_clone(
1927 &self,
1928 repo: String,
1929 path: impl Into<Arc<std::path::Path>>,
1930 cx: &App,
1931 ) -> Task<Result<()>> {
1932 let path = path.into();
1933 match &self.state {
1934 GitStoreState::Local { fs, .. } => {
1935 let fs = fs.clone();
1936 cx.background_executor()
1937 .spawn(async move { fs.git_clone(&repo, &path).await })
1938 }
1939 GitStoreState::Remote {
1940 upstream_client,
1941 upstream_project_id,
1942 ..
1943 } => {
1944 if upstream_client.is_via_collab() {
1945 return Task::ready(Err(anyhow!(
1946 "Git Clone isn't supported for project guests"
1947 )));
1948 }
1949 let request = upstream_client.request(proto::GitClone {
1950 project_id: *upstream_project_id,
1951 abs_path: path.to_string_lossy().into_owned(),
1952 remote_repo: repo,
1953 });
1954
1955 cx.background_spawn(async move {
1956 let result = request.await?;
1957
1958 match result.success {
1959 true => Ok(()),
1960 false => Err(anyhow!("Git Clone failed")),
1961 }
1962 })
1963 }
1964 }
1965 }
1966
1967 async fn handle_update_repository(
1968 this: Entity<Self>,
1969 envelope: TypedEnvelope<proto::UpdateRepository>,
1970 mut cx: AsyncApp,
1971 ) -> Result<()> {
1972 this.update(&mut cx, |this, cx| {
1973 let path_style = this.worktree_store.read(cx).path_style();
1974 let mut update = envelope.payload;
1975
1976 let id = RepositoryId::from_proto(update.id);
1977 let client = this.upstream_client().context("no upstream client")?;
1978
1979 let original_repo_abs_path: Option<Arc<Path>> = update
1980 .original_repo_abs_path
1981 .as_deref()
1982 .map(|p| Path::new(p).into());
1983
1984 let mut repo_subscription = None;
1985 let repo = this.repositories.entry(id).or_insert_with(|| {
1986 let git_store = cx.weak_entity();
1987 let repo = cx.new(|cx| {
1988 Repository::remote(
1989 id,
1990 Path::new(&update.abs_path).into(),
1991 original_repo_abs_path.clone(),
1992 path_style,
1993 ProjectId(update.project_id),
1994 client,
1995 git_store,
1996 cx,
1997 )
1998 });
1999 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
2000 cx.emit(GitStoreEvent::RepositoryAdded);
2001 repo
2002 });
2003 this._subscriptions.extend(repo_subscription);
2004
2005 repo.update(cx, {
2006 let update = update.clone();
2007 |repo, cx| repo.apply_remote_update(update, cx)
2008 })?;
2009
2010 this.active_repo_id.get_or_insert_with(|| {
2011 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
2012 id
2013 });
2014
2015 if let Some((client, project_id)) = this.downstream_client() {
2016 update.project_id = project_id.to_proto();
2017 client.send(update).log_err();
2018 }
2019 Ok(())
2020 })
2021 }
2022
2023 async fn handle_remove_repository(
2024 this: Entity<Self>,
2025 envelope: TypedEnvelope<proto::RemoveRepository>,
2026 mut cx: AsyncApp,
2027 ) -> Result<()> {
2028 this.update(&mut cx, |this, cx| {
2029 let mut update = envelope.payload;
2030 let id = RepositoryId::from_proto(update.id);
2031 this.repositories.remove(&id);
2032 if let Some((client, project_id)) = this.downstream_client() {
2033 update.project_id = project_id.to_proto();
2034 client.send(update).log_err();
2035 }
2036 if this.active_repo_id == Some(id) {
2037 this.active_repo_id = None;
2038 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
2039 }
2040 cx.emit(GitStoreEvent::RepositoryRemoved(id));
2041 });
2042 Ok(())
2043 }
2044
2045 async fn handle_git_init(
2046 this: Entity<Self>,
2047 envelope: TypedEnvelope<proto::GitInit>,
2048 cx: AsyncApp,
2049 ) -> Result<proto::Ack> {
2050 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2051 let name = envelope.payload.fallback_branch_name;
2052 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2053 .await?;
2054
2055 Ok(proto::Ack {})
2056 }
2057
2058 async fn handle_git_clone(
2059 this: Entity<Self>,
2060 envelope: TypedEnvelope<proto::GitClone>,
2061 cx: AsyncApp,
2062 ) -> Result<proto::GitCloneResponse> {
2063 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2064 let repo_name = envelope.payload.remote_repo;
2065 let result = cx
2066 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2067 .await;
2068
2069 Ok(proto::GitCloneResponse {
2070 success: result.is_ok(),
2071 })
2072 }
2073
2074 async fn handle_fetch(
2075 this: Entity<Self>,
2076 envelope: TypedEnvelope<proto::Fetch>,
2077 mut cx: AsyncApp,
2078 ) -> Result<proto::RemoteMessageResponse> {
2079 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2080 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2081 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2082 let askpass_id = envelope.payload.askpass_id;
2083
2084 let askpass = make_remote_delegate(
2085 this,
2086 envelope.payload.project_id,
2087 repository_id,
2088 askpass_id,
2089 &mut cx,
2090 );
2091
2092 let remote_output = repository_handle
2093 .update(&mut cx, |repository_handle, cx| {
2094 repository_handle.fetch(fetch_options, askpass, cx)
2095 })
2096 .await??;
2097
2098 Ok(proto::RemoteMessageResponse {
2099 stdout: remote_output.stdout,
2100 stderr: remote_output.stderr,
2101 })
2102 }
2103
2104 async fn handle_push(
2105 this: Entity<Self>,
2106 envelope: TypedEnvelope<proto::Push>,
2107 mut cx: AsyncApp,
2108 ) -> Result<proto::RemoteMessageResponse> {
2109 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2110 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2111
2112 let askpass_id = envelope.payload.askpass_id;
2113 let askpass = make_remote_delegate(
2114 this,
2115 envelope.payload.project_id,
2116 repository_id,
2117 askpass_id,
2118 &mut cx,
2119 );
2120
2121 let options = envelope
2122 .payload
2123 .options
2124 .as_ref()
2125 .map(|_| match envelope.payload.options() {
2126 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
2127 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
2128 });
2129
2130 let branch_name = envelope.payload.branch_name.into();
2131 let remote_branch_name = envelope.payload.remote_branch_name.into();
2132 let remote_name = envelope.payload.remote_name.into();
2133
2134 let remote_output = repository_handle
2135 .update(&mut cx, |repository_handle, cx| {
2136 repository_handle.push(
2137 branch_name,
2138 remote_branch_name,
2139 remote_name,
2140 options,
2141 askpass,
2142 cx,
2143 )
2144 })
2145 .await??;
2146 Ok(proto::RemoteMessageResponse {
2147 stdout: remote_output.stdout,
2148 stderr: remote_output.stderr,
2149 })
2150 }
2151
2152 async fn handle_pull(
2153 this: Entity<Self>,
2154 envelope: TypedEnvelope<proto::Pull>,
2155 mut cx: AsyncApp,
2156 ) -> Result<proto::RemoteMessageResponse> {
2157 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2158 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2159 let askpass_id = envelope.payload.askpass_id;
2160 let askpass = make_remote_delegate(
2161 this,
2162 envelope.payload.project_id,
2163 repository_id,
2164 askpass_id,
2165 &mut cx,
2166 );
2167
2168 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2169 let remote_name = envelope.payload.remote_name.into();
2170 let rebase = envelope.payload.rebase;
2171
2172 let remote_message = repository_handle
2173 .update(&mut cx, |repository_handle, cx| {
2174 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2175 })
2176 .await??;
2177
2178 Ok(proto::RemoteMessageResponse {
2179 stdout: remote_message.stdout,
2180 stderr: remote_message.stderr,
2181 })
2182 }
2183
2184 async fn handle_stage(
2185 this: Entity<Self>,
2186 envelope: TypedEnvelope<proto::Stage>,
2187 mut cx: AsyncApp,
2188 ) -> Result<proto::Ack> {
2189 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2190 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2191
2192 let entries = envelope
2193 .payload
2194 .paths
2195 .into_iter()
2196 .map(|path| RepoPath::new(&path))
2197 .collect::<Result<Vec<_>>>()?;
2198
2199 repository_handle
2200 .update(&mut cx, |repository_handle, cx| {
2201 repository_handle.stage_entries(entries, cx)
2202 })
2203 .await?;
2204 Ok(proto::Ack {})
2205 }
2206
2207 async fn handle_unstage(
2208 this: Entity<Self>,
2209 envelope: TypedEnvelope<proto::Unstage>,
2210 mut cx: AsyncApp,
2211 ) -> Result<proto::Ack> {
2212 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2213 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2214
2215 let entries = envelope
2216 .payload
2217 .paths
2218 .into_iter()
2219 .map(|path| RepoPath::new(&path))
2220 .collect::<Result<Vec<_>>>()?;
2221
2222 repository_handle
2223 .update(&mut cx, |repository_handle, cx| {
2224 repository_handle.unstage_entries(entries, cx)
2225 })
2226 .await?;
2227
2228 Ok(proto::Ack {})
2229 }
2230
2231 async fn handle_stash(
2232 this: Entity<Self>,
2233 envelope: TypedEnvelope<proto::Stash>,
2234 mut cx: AsyncApp,
2235 ) -> Result<proto::Ack> {
2236 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2237 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2238
2239 let entries = envelope
2240 .payload
2241 .paths
2242 .into_iter()
2243 .map(|path| RepoPath::new(&path))
2244 .collect::<Result<Vec<_>>>()?;
2245
2246 repository_handle
2247 .update(&mut cx, |repository_handle, cx| {
2248 repository_handle.stash_entries(entries, cx)
2249 })
2250 .await?;
2251
2252 Ok(proto::Ack {})
2253 }
2254
2255 async fn handle_stash_pop(
2256 this: Entity<Self>,
2257 envelope: TypedEnvelope<proto::StashPop>,
2258 mut cx: AsyncApp,
2259 ) -> Result<proto::Ack> {
2260 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2261 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2262 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2263
2264 repository_handle
2265 .update(&mut cx, |repository_handle, cx| {
2266 repository_handle.stash_pop(stash_index, cx)
2267 })
2268 .await?;
2269
2270 Ok(proto::Ack {})
2271 }
2272
2273 async fn handle_stash_apply(
2274 this: Entity<Self>,
2275 envelope: TypedEnvelope<proto::StashApply>,
2276 mut cx: AsyncApp,
2277 ) -> Result<proto::Ack> {
2278 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2279 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2280 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2281
2282 repository_handle
2283 .update(&mut cx, |repository_handle, cx| {
2284 repository_handle.stash_apply(stash_index, cx)
2285 })
2286 .await?;
2287
2288 Ok(proto::Ack {})
2289 }
2290
2291 async fn handle_stash_drop(
2292 this: Entity<Self>,
2293 envelope: TypedEnvelope<proto::StashDrop>,
2294 mut cx: AsyncApp,
2295 ) -> Result<proto::Ack> {
2296 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2297 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2298 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2299
2300 repository_handle
2301 .update(&mut cx, |repository_handle, cx| {
2302 repository_handle.stash_drop(stash_index, cx)
2303 })
2304 .await??;
2305
2306 Ok(proto::Ack {})
2307 }
2308
2309 async fn handle_set_index_text(
2310 this: Entity<Self>,
2311 envelope: TypedEnvelope<proto::SetIndexText>,
2312 mut cx: AsyncApp,
2313 ) -> Result<proto::Ack> {
2314 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2315 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2316 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2317
2318 repository_handle
2319 .update(&mut cx, |repository_handle, cx| {
2320 repository_handle.spawn_set_index_text_job(
2321 repo_path,
2322 envelope.payload.text,
2323 None,
2324 cx,
2325 )
2326 })
2327 .await??;
2328 Ok(proto::Ack {})
2329 }
2330
2331 async fn handle_run_hook(
2332 this: Entity<Self>,
2333 envelope: TypedEnvelope<proto::RunGitHook>,
2334 mut cx: AsyncApp,
2335 ) -> Result<proto::Ack> {
2336 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2337 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2338 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2339 repository_handle
2340 .update(&mut cx, |repository_handle, cx| {
2341 repository_handle.run_hook(hook, cx)
2342 })
2343 .await??;
2344 Ok(proto::Ack {})
2345 }
2346
2347 async fn handle_commit(
2348 this: Entity<Self>,
2349 envelope: TypedEnvelope<proto::Commit>,
2350 mut cx: AsyncApp,
2351 ) -> Result<proto::Ack> {
2352 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2353 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2354 let askpass_id = envelope.payload.askpass_id;
2355
2356 let askpass = make_remote_delegate(
2357 this,
2358 envelope.payload.project_id,
2359 repository_id,
2360 askpass_id,
2361 &mut cx,
2362 );
2363
2364 let message = SharedString::from(envelope.payload.message);
2365 let name = envelope.payload.name.map(SharedString::from);
2366 let email = envelope.payload.email.map(SharedString::from);
2367 let options = envelope.payload.options.unwrap_or_default();
2368
2369 repository_handle
2370 .update(&mut cx, |repository_handle, cx| {
2371 repository_handle.commit(
2372 message,
2373 name.zip(email),
2374 CommitOptions {
2375 amend: options.amend,
2376 signoff: options.signoff,
2377 allow_empty: options.allow_empty,
2378 },
2379 askpass,
2380 cx,
2381 )
2382 })
2383 .await??;
2384 Ok(proto::Ack {})
2385 }
2386
2387 async fn handle_get_remotes(
2388 this: Entity<Self>,
2389 envelope: TypedEnvelope<proto::GetRemotes>,
2390 mut cx: AsyncApp,
2391 ) -> Result<proto::GetRemotesResponse> {
2392 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2393 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2394
2395 let branch_name = envelope.payload.branch_name;
2396 let is_push = envelope.payload.is_push;
2397
2398 let remotes = repository_handle
2399 .update(&mut cx, |repository_handle, _| {
2400 repository_handle.get_remotes(branch_name, is_push)
2401 })
2402 .await??;
2403
2404 Ok(proto::GetRemotesResponse {
2405 remotes: remotes
2406 .into_iter()
2407 .map(|remotes| proto::get_remotes_response::Remote {
2408 name: remotes.name.to_string(),
2409 })
2410 .collect::<Vec<_>>(),
2411 })
2412 }
2413
2414 async fn handle_get_worktrees(
2415 this: Entity<Self>,
2416 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2417 mut cx: AsyncApp,
2418 ) -> Result<proto::GitWorktreesResponse> {
2419 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2420 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2421
2422 let worktrees = repository_handle
2423 .update(&mut cx, |repository_handle, _| {
2424 repository_handle.worktrees()
2425 })
2426 .await??;
2427
2428 Ok(proto::GitWorktreesResponse {
2429 worktrees: worktrees
2430 .into_iter()
2431 .map(|worktree| worktree_to_proto(&worktree))
2432 .collect::<Vec<_>>(),
2433 })
2434 }
2435
2436 async fn handle_create_worktree(
2437 this: Entity<Self>,
2438 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2439 mut cx: AsyncApp,
2440 ) -> Result<proto::Ack> {
2441 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2442 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2443 let directory = PathBuf::from(envelope.payload.directory);
2444 let name = envelope.payload.name;
2445 let commit = envelope.payload.commit;
2446 let use_existing_branch = envelope.payload.use_existing_branch;
2447 let target = if name.is_empty() {
2448 CreateWorktreeTarget::Detached { base_sha: commit }
2449 } else if use_existing_branch {
2450 CreateWorktreeTarget::ExistingBranch { branch_name: name }
2451 } else {
2452 CreateWorktreeTarget::NewBranch {
2453 branch_name: name,
2454 base_sha: commit,
2455 }
2456 };
2457
2458 repository_handle
2459 .update(&mut cx, |repository_handle, _| {
2460 repository_handle.create_worktree(target, directory)
2461 })
2462 .await??;
2463
2464 Ok(proto::Ack {})
2465 }
2466
2467 async fn handle_remove_worktree(
2468 this: Entity<Self>,
2469 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2470 mut cx: AsyncApp,
2471 ) -> Result<proto::Ack> {
2472 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2473 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2474 let path = PathBuf::from(envelope.payload.path);
2475 let force = envelope.payload.force;
2476
2477 repository_handle
2478 .update(&mut cx, |repository_handle, _| {
2479 repository_handle.remove_worktree(path, force)
2480 })
2481 .await??;
2482
2483 Ok(proto::Ack {})
2484 }
2485
2486 async fn handle_rename_worktree(
2487 this: Entity<Self>,
2488 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2489 mut cx: AsyncApp,
2490 ) -> Result<proto::Ack> {
2491 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2492 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2493 let old_path = PathBuf::from(envelope.payload.old_path);
2494 let new_path = PathBuf::from(envelope.payload.new_path);
2495
2496 repository_handle
2497 .update(&mut cx, |repository_handle, _| {
2498 repository_handle.rename_worktree(old_path, new_path)
2499 })
2500 .await??;
2501
2502 Ok(proto::Ack {})
2503 }
2504
2505 async fn handle_get_head_sha(
2506 this: Entity<Self>,
2507 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2508 mut cx: AsyncApp,
2509 ) -> Result<proto::GitGetHeadShaResponse> {
2510 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2511 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2512
2513 let head_sha = repository_handle
2514 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2515 .await??;
2516
2517 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2518 }
2519
2520 async fn handle_get_branches(
2521 this: Entity<Self>,
2522 envelope: TypedEnvelope<proto::GitGetBranches>,
2523 mut cx: AsyncApp,
2524 ) -> Result<proto::GitBranchesResponse> {
2525 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2526 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2527
2528 let branches = repository_handle
2529 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2530 .await??;
2531
2532 Ok(proto::GitBranchesResponse {
2533 branches: branches
2534 .into_iter()
2535 .map(|branch| branch_to_proto(&branch))
2536 .collect::<Vec<_>>(),
2537 })
2538 }
2539 async fn handle_get_default_branch(
2540 this: Entity<Self>,
2541 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2542 mut cx: AsyncApp,
2543 ) -> Result<proto::GetDefaultBranchResponse> {
2544 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2545 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2546
2547 let branch = repository_handle
2548 .update(&mut cx, |repository_handle, _| {
2549 repository_handle.default_branch(false)
2550 })
2551 .await??
2552 .map(Into::into);
2553
2554 Ok(proto::GetDefaultBranchResponse { branch })
2555 }
2556 async fn handle_create_branch(
2557 this: Entity<Self>,
2558 envelope: TypedEnvelope<proto::GitCreateBranch>,
2559 mut cx: AsyncApp,
2560 ) -> Result<proto::Ack> {
2561 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2562 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2563 let branch_name = envelope.payload.branch_name;
2564
2565 repository_handle
2566 .update(&mut cx, |repository_handle, _| {
2567 repository_handle.create_branch(branch_name, None)
2568 })
2569 .await??;
2570
2571 Ok(proto::Ack {})
2572 }
2573
2574 async fn handle_change_branch(
2575 this: Entity<Self>,
2576 envelope: TypedEnvelope<proto::GitChangeBranch>,
2577 mut cx: AsyncApp,
2578 ) -> Result<proto::Ack> {
2579 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2580 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2581 let branch_name = envelope.payload.branch_name;
2582
2583 repository_handle
2584 .update(&mut cx, |repository_handle, _| {
2585 repository_handle.change_branch(branch_name)
2586 })
2587 .await??;
2588
2589 Ok(proto::Ack {})
2590 }
2591
2592 async fn handle_rename_branch(
2593 this: Entity<Self>,
2594 envelope: TypedEnvelope<proto::GitRenameBranch>,
2595 mut cx: AsyncApp,
2596 ) -> Result<proto::Ack> {
2597 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2598 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2599 let branch = envelope.payload.branch;
2600 let new_name = envelope.payload.new_name;
2601
2602 repository_handle
2603 .update(&mut cx, |repository_handle, _| {
2604 repository_handle.rename_branch(branch, new_name)
2605 })
2606 .await??;
2607
2608 Ok(proto::Ack {})
2609 }
2610
2611 async fn handle_create_remote(
2612 this: Entity<Self>,
2613 envelope: TypedEnvelope<proto::GitCreateRemote>,
2614 mut cx: AsyncApp,
2615 ) -> Result<proto::Ack> {
2616 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2617 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2618 let remote_name = envelope.payload.remote_name;
2619 let remote_url = envelope.payload.remote_url;
2620
2621 repository_handle
2622 .update(&mut cx, |repository_handle, _| {
2623 repository_handle.create_remote(remote_name, remote_url)
2624 })
2625 .await??;
2626
2627 Ok(proto::Ack {})
2628 }
2629
2630 async fn handle_delete_branch(
2631 this: Entity<Self>,
2632 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2633 mut cx: AsyncApp,
2634 ) -> Result<proto::Ack> {
2635 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2636 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2637 let is_remote = envelope.payload.is_remote;
2638 let branch_name = envelope.payload.branch_name;
2639
2640 repository_handle
2641 .update(&mut cx, |repository_handle, _| {
2642 repository_handle.delete_branch(is_remote, branch_name)
2643 })
2644 .await??;
2645
2646 Ok(proto::Ack {})
2647 }
2648
2649 async fn handle_remove_remote(
2650 this: Entity<Self>,
2651 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2652 mut cx: AsyncApp,
2653 ) -> Result<proto::Ack> {
2654 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2655 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2656 let remote_name = envelope.payload.remote_name;
2657
2658 repository_handle
2659 .update(&mut cx, |repository_handle, _| {
2660 repository_handle.remove_remote(remote_name)
2661 })
2662 .await??;
2663
2664 Ok(proto::Ack {})
2665 }
2666
2667 async fn handle_show(
2668 this: Entity<Self>,
2669 envelope: TypedEnvelope<proto::GitShow>,
2670 mut cx: AsyncApp,
2671 ) -> Result<proto::GitCommitDetails> {
2672 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2673 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2674
2675 let commit = repository_handle
2676 .update(&mut cx, |repository_handle, _| {
2677 repository_handle.show(envelope.payload.commit)
2678 })
2679 .await??;
2680 Ok(proto::GitCommitDetails {
2681 sha: commit.sha.into(),
2682 message: commit.message.into(),
2683 commit_timestamp: commit.commit_timestamp,
2684 author_email: commit.author_email.into(),
2685 author_name: commit.author_name.into(),
2686 })
2687 }
2688
2689 async fn handle_create_checkpoint(
2690 this: Entity<Self>,
2691 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2692 mut cx: AsyncApp,
2693 ) -> Result<proto::GitCreateCheckpointResponse> {
2694 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2695 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2696
2697 let checkpoint = repository_handle
2698 .update(&mut cx, |repository, _| repository.checkpoint())
2699 .await??;
2700
2701 Ok(proto::GitCreateCheckpointResponse {
2702 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2703 })
2704 }
2705
2706 async fn handle_restore_checkpoint(
2707 this: Entity<Self>,
2708 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2709 mut cx: AsyncApp,
2710 ) -> Result<proto::Ack> {
2711 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2712 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2713
2714 let checkpoint = GitRepositoryCheckpoint {
2715 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2716 };
2717
2718 repository_handle
2719 .update(&mut cx, |repository, _| {
2720 repository.restore_checkpoint(checkpoint)
2721 })
2722 .await??;
2723
2724 Ok(proto::Ack {})
2725 }
2726
2727 async fn handle_compare_checkpoints(
2728 this: Entity<Self>,
2729 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2730 mut cx: AsyncApp,
2731 ) -> Result<proto::GitCompareCheckpointsResponse> {
2732 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2733 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2734
2735 let left = GitRepositoryCheckpoint {
2736 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2737 };
2738 let right = GitRepositoryCheckpoint {
2739 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2740 };
2741
2742 let equal = repository_handle
2743 .update(&mut cx, |repository, _| {
2744 repository.compare_checkpoints(left, right)
2745 })
2746 .await??;
2747
2748 Ok(proto::GitCompareCheckpointsResponse { equal })
2749 }
2750
2751 async fn handle_diff_checkpoints(
2752 this: Entity<Self>,
2753 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2754 mut cx: AsyncApp,
2755 ) -> Result<proto::GitDiffCheckpointsResponse> {
2756 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2757 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2758
2759 let base = GitRepositoryCheckpoint {
2760 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2761 };
2762 let target = GitRepositoryCheckpoint {
2763 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2764 };
2765
2766 let diff = repository_handle
2767 .update(&mut cx, |repository, _| {
2768 repository.diff_checkpoints(base, target)
2769 })
2770 .await??;
2771
2772 Ok(proto::GitDiffCheckpointsResponse { diff })
2773 }
2774
2775 async fn handle_load_commit_diff(
2776 this: Entity<Self>,
2777 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2778 mut cx: AsyncApp,
2779 ) -> Result<proto::LoadCommitDiffResponse> {
2780 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2781 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2782
2783 let commit_diff = repository_handle
2784 .update(&mut cx, |repository_handle, _| {
2785 repository_handle.load_commit_diff(envelope.payload.commit)
2786 })
2787 .await??;
2788 Ok(proto::LoadCommitDiffResponse {
2789 files: commit_diff
2790 .files
2791 .into_iter()
2792 .map(|file| proto::CommitFile {
2793 path: file.path.to_proto(),
2794 old_text: file.old_text,
2795 new_text: file.new_text,
2796 is_binary: file.is_binary,
2797 })
2798 .collect(),
2799 })
2800 }
2801
2802 async fn handle_file_history(
2803 this: Entity<Self>,
2804 envelope: TypedEnvelope<proto::GitFileHistory>,
2805 mut cx: AsyncApp,
2806 ) -> Result<proto::GitFileHistoryResponse> {
2807 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2808 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2809 let path = RepoPath::from_proto(&envelope.payload.path)?;
2810 let skip = envelope.payload.skip as usize;
2811 let limit = envelope.payload.limit.map(|l| l as usize);
2812
2813 let file_history = repository_handle
2814 .update(&mut cx, |repository_handle, _| {
2815 repository_handle.file_history_paginated(path, skip, limit)
2816 })
2817 .await??;
2818
2819 Ok(proto::GitFileHistoryResponse {
2820 entries: file_history
2821 .entries
2822 .into_iter()
2823 .map(|entry| proto::FileHistoryEntry {
2824 sha: entry.sha.to_string(),
2825 subject: entry.subject.to_string(),
2826 message: entry.message.to_string(),
2827 commit_timestamp: entry.commit_timestamp,
2828 author_name: entry.author_name.to_string(),
2829 author_email: entry.author_email.to_string(),
2830 })
2831 .collect(),
2832 path: file_history.path.to_proto(),
2833 })
2834 }
2835
2836 async fn handle_reset(
2837 this: Entity<Self>,
2838 envelope: TypedEnvelope<proto::GitReset>,
2839 mut cx: AsyncApp,
2840 ) -> Result<proto::Ack> {
2841 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2842 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2843
2844 let mode = match envelope.payload.mode() {
2845 git_reset::ResetMode::Soft => ResetMode::Soft,
2846 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2847 };
2848
2849 repository_handle
2850 .update(&mut cx, |repository_handle, cx| {
2851 repository_handle.reset(envelope.payload.commit, mode, cx)
2852 })
2853 .await??;
2854 Ok(proto::Ack {})
2855 }
2856
2857 async fn handle_checkout_files(
2858 this: Entity<Self>,
2859 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2860 mut cx: AsyncApp,
2861 ) -> Result<proto::Ack> {
2862 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2863 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2864 let paths = envelope
2865 .payload
2866 .paths
2867 .iter()
2868 .map(|s| RepoPath::from_proto(s))
2869 .collect::<Result<Vec<_>>>()?;
2870
2871 repository_handle
2872 .update(&mut cx, |repository_handle, cx| {
2873 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2874 })
2875 .await?;
2876 Ok(proto::Ack {})
2877 }
2878
    /// Handles a `proto::OpenCommitMessageBuffer` request: opens (or reuses)
    /// the repository's commit-message buffer, replicates it to the requesting
    /// peer via the buffer store, and returns the buffer's remote id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                // Stream the buffer's contents to the peer that asked for it,
                // preferring the original sender when the request was relayed.
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2909
    /// Handles a `proto::AskPassRequest` forwarded from a peer: finds the
    /// askpass delegate registered for this repository under `askpass_id`,
    /// prompts it for a credential, and returns the response.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map while prompting (so the lock isn't
        // held across the await); it is put back after a successful prompt.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2938
2939 async fn handle_check_for_pushed_commits(
2940 this: Entity<Self>,
2941 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2942 mut cx: AsyncApp,
2943 ) -> Result<proto::CheckForPushedCommitsResponse> {
2944 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2945 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2946
2947 let branches = repository_handle
2948 .update(&mut cx, |repository_handle, _| {
2949 repository_handle.check_for_pushed_commits()
2950 })
2951 .await??;
2952 Ok(proto::CheckForPushedCommitsResponse {
2953 pushed_to: branches
2954 .into_iter()
2955 .map(|commit| commit.to_string())
2956 .collect(),
2957 })
2958 }
2959
2960 async fn handle_git_diff(
2961 this: Entity<Self>,
2962 envelope: TypedEnvelope<proto::GitDiff>,
2963 mut cx: AsyncApp,
2964 ) -> Result<proto::GitDiffResponse> {
2965 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2966 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2967 let diff_type = match envelope.payload.diff_type() {
2968 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2969 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2970 proto::git_diff::DiffType::MergeBase => {
2971 let base_ref = envelope
2972 .payload
2973 .merge_base_ref
2974 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2975 DiffType::MergeBase {
2976 base_ref: base_ref.into(),
2977 }
2978 }
2979 };
2980
2981 let mut diff = repository_handle
2982 .update(&mut cx, |repository_handle, cx| {
2983 repository_handle.diff(diff_type, cx)
2984 })
2985 .await??;
2986 const ONE_MB: usize = 1_000_000;
2987 if diff.len() > ONE_MB {
2988 diff = diff.chars().take(ONE_MB).collect()
2989 }
2990
2991 Ok(proto::GitDiffResponse { diff })
2992 }
2993
2994 async fn handle_tree_diff(
2995 this: Entity<Self>,
2996 request: TypedEnvelope<proto::GetTreeDiff>,
2997 mut cx: AsyncApp,
2998 ) -> Result<proto::GetTreeDiffResponse> {
2999 let repository_id = RepositoryId(request.payload.repository_id);
3000 let diff_type = if request.payload.is_merge {
3001 DiffTreeType::MergeBase {
3002 base: request.payload.base.into(),
3003 head: request.payload.head.into(),
3004 }
3005 } else {
3006 DiffTreeType::Since {
3007 base: request.payload.base.into(),
3008 head: request.payload.head.into(),
3009 }
3010 };
3011
3012 let diff = this
3013 .update(&mut cx, |this, cx| {
3014 let repository = this.repositories().get(&repository_id)?;
3015 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
3016 })
3017 .context("missing repository")?
3018 .await??;
3019
3020 Ok(proto::GetTreeDiffResponse {
3021 entries: diff
3022 .entries
3023 .into_iter()
3024 .map(|(path, status)| proto::TreeDiffStatus {
3025 path: path.as_ref().to_proto(),
3026 status: match status {
3027 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
3028 TreeDiffStatus::Modified { .. } => {
3029 proto::tree_diff_status::Status::Modified.into()
3030 }
3031 TreeDiffStatus::Deleted { .. } => {
3032 proto::tree_diff_status::Status::Deleted.into()
3033 }
3034 },
3035 oid: match status {
3036 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
3037 Some(old.to_string())
3038 }
3039 TreeDiffStatus::Added => None,
3040 },
3041 })
3042 .collect(),
3043 })
3044 }
3045
    /// RPC handler: loads the raw content of a git blob, identified by its
    /// object id, from the requested repository and returns it to the peer.
    async fn handle_get_blob_content(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetBlobContent>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetBlobContentResponse> {
        let oid = git::Oid::from_str(&request.payload.oid)?;
        let repository_id = RepositoryId(request.payload.repository_id);
        let content = this
            .update(&mut cx, |this, cx| {
                // `None` (propagated via `?`) means the repository id is unknown.
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
            })
            .context("missing repository")?
            .await?;
        Ok(proto::GetBlobContentResponse { content })
    }
3062
    /// RPC handler: opens (or reuses) the unstaged diff for a buffer, retains
    /// it on behalf of the requesting peer, and returns the current index
    /// ("staged") base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        this.update(&mut cx, |this, _| {
            // Store a strong handle keyed by the requesting peer so the diff
            // stays alive while the collaborator has it open.
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
3086
    /// RPC handler: opens (or reuses) the uncommitted diff for a buffer,
    /// retains it on behalf of the requesting peer, and returns the HEAD and
    /// index base texts plus a `mode` describing which texts are present.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        this.update(&mut cx, |this, _| {
            // Keep the diff alive for as long as the peer has it open.
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The index base text comes from the secondary (unstaged) diff,
            // when that diff's base text exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            // Choose which base texts to transmit:
            // - IndexMatchesHead: only the committed text is sent, because the
            //   index snapshot is the same snapshot as HEAD (same remote id).
            // - IndexAndHead: committed and staged texts are sent separately;
            //   either may be absent (e.g. untracked or newly deleted files).
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3147
3148 async fn handle_update_diff_bases(
3149 this: Entity<Self>,
3150 request: TypedEnvelope<proto::UpdateDiffBases>,
3151 mut cx: AsyncApp,
3152 ) -> Result<()> {
3153 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3154 this.update(&mut cx, |this, cx| {
3155 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
3156 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
3157 {
3158 let buffer = buffer.read(cx).text_snapshot();
3159 diff_state.update(cx, |diff_state, cx| {
3160 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
3161 })
3162 }
3163 });
3164 Ok(())
3165 }
3166
    /// RPC handler: computes git blame for a buffer at the requested version,
    /// first waiting until this replica has applied all operations up to that
    /// version so the result matches what the requester sees.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3189
3190 async fn handle_get_permalink_to_line(
3191 this: Entity<Self>,
3192 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3193 mut cx: AsyncApp,
3194 ) -> Result<proto::GetPermalinkToLineResponse> {
3195 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3196 // let version = deserialize_version(&envelope.payload.version);
3197 let selection = {
3198 let proto_selection = envelope
3199 .payload
3200 .selection
3201 .context("no selection to get permalink for defined")?;
3202 proto_selection.start as u32..proto_selection.end as u32
3203 };
3204 let buffer = this.read_with(&cx, |this, cx| {
3205 this.buffer_store.read(cx).get_existing(buffer_id)
3206 })?;
3207 let permalink = this
3208 .update(&mut cx, |this, cx| {
3209 this.get_permalink_to_line(&buffer, selection, cx)
3210 })
3211 .await?;
3212 Ok(proto::GetPermalinkToLineResponse {
3213 permalink: permalink.to_string(),
3214 })
3215 }
3216
3217 fn repository_for_request(
3218 this: &Entity<Self>,
3219 id: RepositoryId,
3220 cx: &mut AsyncApp,
3221 ) -> Result<Entity<Repository>> {
3222 this.read_with(cx, |this, _| {
3223 this.repositories
3224 .get(&id)
3225 .context("missing repository handle")
3226 .cloned()
3227 })
3228 }
3229
3230 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3231 self.repositories
3232 .iter()
3233 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3234 .collect()
3235 }
3236
    /// Groups a batch of updated worktree entries by the repository that
    /// contains them, converting each absolute path into a repo-relative
    /// [`RepoPath`].
    ///
    /// Paths that fall inside nested repositories are attributed only to the
    /// innermost repository.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize once, preserving sorted order, so that all paths under a
        // given repository's work directory form one contiguous range below.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // One background task per repository; `.rev()` makes the deepest
            // (most specific) work directories come first in `tasks`.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the entry's index so duplicates can be
                        // filtered across repositories afterwards.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to it's innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3316}
3317
3318impl BufferGitState {
3319 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3320 Self {
3321 unstaged_diff: Default::default(),
3322 uncommitted_diff: Default::default(),
3323 oid_diffs: Default::default(),
3324 recalculate_diff_task: Default::default(),
3325 language: Default::default(),
3326 language_registry: Default::default(),
3327 recalculating_tx: postage::watch::channel_with(false).0,
3328 hunk_staging_operation_count: 0,
3329 hunk_staging_operation_count_as_of_write: 0,
3330 head_text: Default::default(),
3331 index_text: Default::default(),
3332 oid_texts: Default::default(),
3333 head_changed: Default::default(),
3334 index_changed: Default::default(),
3335 language_changed: Default::default(),
3336 conflict_updated_futures: Default::default(),
3337 conflict_set: Default::default(),
3338 reparse_conflict_markers_task: Default::default(),
3339 }
3340 }
3341
3342 #[ztracing::instrument(skip_all)]
3343 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3344 self.language = buffer.read(cx).language().cloned();
3345 self.language_changed = true;
3346 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3347 }
3348
    /// Re-parses git conflict markers in `buffer` and updates the associated
    /// [`ConflictSet`], if one is still alive and currently has a conflict.
    ///
    /// The returned receiver fires once the conflict set has been updated; if
    /// no update is scheduled, the sender side is dropped and the receiver
    /// resolves with `Canceled`.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when the set currently reports a conflict; otherwise
        // there is nothing to update.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and compare against the old snapshot off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Resolve every waiter registered since the last update.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3400
3401 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3402 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3403 }
3404
3405 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3406 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3407 }
3408
3409 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3410 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3411 }
3412
3413 fn handle_base_texts_updated(
3414 &mut self,
3415 buffer: text::BufferSnapshot,
3416 message: proto::UpdateDiffBases,
3417 cx: &mut Context<Self>,
3418 ) {
3419 use proto::update_diff_bases::Mode;
3420
3421 let Some(mode) = Mode::from_i32(message.mode) else {
3422 return;
3423 };
3424
3425 let diff_bases_change = match mode {
3426 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3427 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3428 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3429 Mode::IndexAndHead => DiffBasesChange::SetEach {
3430 index: message.staged_text,
3431 head: message.committed_text,
3432 },
3433 };
3434
3435 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3436 }
3437
3438 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3439 if *self.recalculating_tx.borrow() {
3440 let mut rx = self.recalculating_tx.subscribe();
3441 Some(async move {
3442 loop {
3443 let is_recalculating = rx.recv().await;
3444 if is_recalculating != Some(true) {
3445 break;
3446 }
3447 }
3448 })
3449 } else {
3450 None
3451 }
3452 }
3453
3454 fn diff_bases_changed(
3455 &mut self,
3456 buffer: text::BufferSnapshot,
3457 diff_bases_change: Option<DiffBasesChange>,
3458 cx: &mut Context<Self>,
3459 ) {
3460 match diff_bases_change {
3461 Some(DiffBasesChange::SetIndex(index)) => {
3462 self.index_text = index.map(|mut index| {
3463 text::LineEnding::normalize(&mut index);
3464 Arc::from(index.as_str())
3465 });
3466 self.index_changed = true;
3467 }
3468 Some(DiffBasesChange::SetHead(head)) => {
3469 self.head_text = head.map(|mut head| {
3470 text::LineEnding::normalize(&mut head);
3471 Arc::from(head.as_str())
3472 });
3473 self.head_changed = true;
3474 }
3475 Some(DiffBasesChange::SetBoth(text)) => {
3476 let text = text.map(|mut text| {
3477 text::LineEnding::normalize(&mut text);
3478 Arc::from(text.as_str())
3479 });
3480 self.head_text = text.clone();
3481 self.index_text = text;
3482 self.head_changed = true;
3483 self.index_changed = true;
3484 }
3485 Some(DiffBasesChange::SetEach { index, head }) => {
3486 self.index_text = index.map(|mut index| {
3487 text::LineEnding::normalize(&mut index);
3488 Arc::from(index.as_str())
3489 });
3490 self.index_changed = true;
3491 self.head_text = head.map(|mut head| {
3492 text::LineEnding::normalize(&mut head);
3493 Arc::from(head.as_str())
3494 });
3495 self.head_changed = true;
3496 }
3497 None => {}
3498 }
3499
3500 self.recalculate_diffs(buffer, cx)
3501 }
3502
    /// Recomputes every live diff (unstaged, uncommitted, and per-oid) for
    /// `buffer` against the currently stored base texts.
    ///
    /// The work runs in a task stored in `recalculate_diff_task`;
    /// `recalculating_tx` broadcasts `true` while the recalculation is in
    /// flight (see [`Self::wait_for_recalculation`]). The task aborts early
    /// if new hunk staging operations arrive mid-flight.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // `DiffBasesChange::SetBoth` stores one shared Arc in both fields, so
        // pointer equality detects index == HEAD without comparing contents.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        // Snapshot the still-alive oid diffs along with their base texts.
        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop cached base texts for oid diffs that nobody holds anymore.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // NOTE(review): the boolean passed via `then_some` below appears
            // to tell `update_diff` which base (index vs. HEAD) changed —
            // confirm against `BufferDiff::update_diff`.
            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When the index matches HEAD, reuse the unstaged snapshot
                // instead of computing the same diff twice.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Refresh each surviving oid diff sequentially, yielding between
            // them to keep the event loop responsive.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot(new_oid_diff, &buffer, cx)
                    })
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and announce that recalculation finished.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
3701}
3702
/// Builds an askpass delegate that forwards credential prompts from a local
/// git subprocess to the downstream client over RPC, returning the client's
/// answer to git.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // Without a downstream client there is nobody to answer the prompt.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Zero the plaintext reply as soon as it has been encrypted.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3732
3733impl RepositoryId {
3734 pub fn to_proto(self) -> u64 {
3735 self.0
3736 }
3737
3738 pub fn from_proto(id: u64) -> Self {
3739 RepositoryId(id)
3740 }
3741}
3742
impl RepositorySnapshot {
    /// Creates a snapshot for a repository that has not been scanned yet:
    /// no statuses, branches, commits, or stash entries.
    fn empty(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
    ) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            // When absent, the original repo path defaults to the work
            // directory itself, which makes this a "main" worktree
            // (see `is_main_worktree`).
            original_repo_abs_path: original_repo_abs_path
                .unwrap_or_else(|| work_directory_abs_path.clone()),
            work_directory_abs_path,
            branch: None,
            branch_list: Arc::from([]),
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
            linked_worktrees: Arc::from([]),
            path_style,
        }
    }

    /// Builds the first `UpdateRepository` message sent to a collaborator:
    /// the full current state, with every status listed as updated.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// Builds an incremental `UpdateRepository` message by diffing this
    /// snapshot against `old`: only changed statuses are included, and
    /// statuses that disappeared are listed in `removed_statuses`.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        // Merge-walk the two status lists, which are ordered by repo path.
        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Only in the new snapshot: newly added status.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // In both snapshots: send only if something changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Only in the old snapshot: status was removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    pub fn is_main_worktree(&self) -> bool {
        self.work_directory_abs_path == self.original_repo_abs_path
    }

    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }

    /// The git worktrees that were created from this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }

    /// Iterates over all status entries, ordered by repo path.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// Aggregate git status summary over all paths in the repository.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// Looks up the status entry recorded for a single repo path, if any.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// Looks up the diff statistics recorded for a single repo path, if any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }

    /// Converts an absolute path into a path relative to this repository's
    /// work directory; `None` when the path lies outside the repository.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Converts a repo-relative path back into an absolute path.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }

    /// Shared implementation of [`Self::abs_path_to_repo_path`]; also used by
    /// callers that have a work directory but no snapshot at hand.
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }

    /// Whether this path was conflicted when the merge heads last changed,
    /// even if the conflict has since been resolved in the index.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }

    /// Whether this path is conflicted now, or was conflicted when the merge
    /// heads last changed.
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change = self
            .merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
3983
3984pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3985 proto::StashEntry {
3986 oid: entry.oid.as_bytes().to_vec(),
3987 message: entry.message.clone(),
3988 branch: entry.branch.clone(),
3989 index: entry.index as u64,
3990 timestamp: entry.timestamp,
3991 }
3992}
3993
3994pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3995 Ok(StashEntry {
3996 oid: Oid::from_bytes(&entry.oid)?,
3997 message: entry.message.clone(),
3998 index: entry.index as usize,
3999 branch: entry.branch.clone(),
4000 timestamp: entry.timestamp,
4001 })
4002}
4003
4004impl MergeDetails {
4005 async fn update(
4006 &mut self,
4007 backend: &Arc<dyn GitRepository>,
4008 current_conflicted_paths: Vec<RepoPath>,
4009 ) -> Result<bool> {
4010 log::debug!("load merge details");
4011 self.message = backend.merge_message().await.map(SharedString::from);
4012 let heads = backend
4013 .revparse_batch(vec![
4014 "MERGE_HEAD".into(),
4015 "CHERRY_PICK_HEAD".into(),
4016 "REBASE_HEAD".into(),
4017 "REVERT_HEAD".into(),
4018 "APPLY_HEAD".into(),
4019 ])
4020 .await
4021 .log_err()
4022 .unwrap_or_default()
4023 .into_iter()
4024 .map(|opt| opt.map(SharedString::from))
4025 .collect::<Vec<_>>();
4026
4027 let mut conflicts_changed = false;
4028
4029 // Record the merge state for newly conflicted paths
4030 for path in ¤t_conflicted_paths {
4031 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
4032 conflicts_changed = true;
4033 self.merge_heads_by_conflicted_path
4034 .insert(path.clone(), heads.clone());
4035 }
4036 }
4037
4038 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
4039 self.merge_heads_by_conflicted_path
4040 .retain(|path, old_merge_heads| {
4041 let keep = current_conflicted_paths.contains(path)
4042 || (old_merge_heads == &heads
4043 && old_merge_heads.iter().any(|head| head.is_some()));
4044 if !keep {
4045 conflicts_changed = true;
4046 }
4047 keep
4048 });
4049
4050 Ok(conflicts_changed)
4051 }
4052}
4053
4054impl Repository {
4055 pub fn is_trusted(&self) -> bool {
4056 match self.repository_state.peek() {
4057 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
4058 _ => false,
4059 }
4060 }
4061
    /// Returns a clone of the current repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
4065
    /// Iterates over the pending git operations recorded for this repository,
    /// ordered by path.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
4069
    /// Returns the aggregate summary over all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
4073
    /// Looks up the pending git operations recorded for a single repo path.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
4079
    /// Constructs a repository entity backed by a local git checkout.
    ///
    /// The backend state is initialized asynchronously; `repository_state`
    /// holds a shared future that git jobs await before touching the
    /// repository.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // The error is stringified so the future's output is `Clone`, which
        // `Shared` needs in order to hand the result to multiple awaiters.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        // Wrap the local state future into the `RepositoryState` enum that
        // the rest of the code consumes.
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data when the repository changes out
        // from under us. The `scan_id > 1` guard presumably skips the initial
        // scan — TODO confirm.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                if this.scan_id > 1 {
                    // Only the `All` log source includes stash commits, so
                    // other cached sources remain valid.
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4153
4154 fn remote(
4155 id: RepositoryId,
4156 work_directory_abs_path: Arc<Path>,
4157 original_repo_abs_path: Option<Arc<Path>>,
4158 path_style: PathStyle,
4159 project_id: ProjectId,
4160 client: AnyProtoClient,
4161 git_store: WeakEntity<GitStore>,
4162 cx: &mut Context<Self>,
4163 ) -> Self {
4164 let snapshot = RepositorySnapshot::empty(
4165 id,
4166 work_directory_abs_path,
4167 original_repo_abs_path,
4168 path_style,
4169 );
4170 let repository_state = RemoteRepositoryState { project_id, client };
4171 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
4172 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
4173 Self {
4174 this: cx.weak_entity(),
4175 snapshot,
4176 commit_message_buffer: None,
4177 git_store,
4178 pending_ops: Default::default(),
4179 paths_needing_status_update: Default::default(),
4180 job_sender,
4181 repository_state,
4182 askpass_delegates: Default::default(),
4183 latest_askpass_id: 0,
4184 active_jobs: Default::default(),
4185 job_id: 0,
4186 initial_graph_data: Default::default(),
4187 commit_data: Default::default(),
4188 graph_commit_data_handler: GraphCommitHandlerState::Closed,
4189 }
4190 }
4191
4192 pub fn git_store(&self) -> Option<Entity<GitStore>> {
4193 self.git_store.upgrade()
4194 }
4195
    /// Reloads the diff bases (index text and/or HEAD text) for every open
    /// buffer in this repository that the owning `GitStore` tracks a diff for.
    ///
    /// Runs as a keyed job (`GitJobKey::ReloadBufferDiffBases`) so repeated
    /// requests can be coalesced by the worker, and only does anything for
    /// local repositories. Git reads and text comparison run on a background
    /// task; the resulting `DiffBasesChange`s are applied on the main thread
    /// and mirrored to the downstream (collab) client, if any.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (main thread): collect, for each open buffer mapping
                // into this repository, which diff bases are materialized
                // (upgradable) along with the texts currently held.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                // Skip buffers belonging to other repositories.
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Phase 2 (background): re-read index/HEAD contents from git
                // and compute the minimal `DiffBasesChange` per buffer (`None`
                // when nothing changed).
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            // Only load texts a live diff actually uses.
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // Both changed to identical text: a
                                            // single shared base suffices.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (main thread): apply each change and forward it to
                // the downstream client so collaborators stay in sync.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                // Encode which base(s) changed for the wire format.
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4355
4356 pub fn send_job<F, Fut, R>(
4357 &mut self,
4358 status: Option<SharedString>,
4359 job: F,
4360 ) -> oneshot::Receiver<R>
4361 where
4362 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
4363 Fut: Future<Output = R> + 'static,
4364 R: Send + 'static,
4365 {
4366 self.send_keyed_job(None, status, job)
4367 }
4368
    /// Enqueues `job` on this repository's git worker.
    ///
    /// `key`, when present, identifies related queued jobs for the worker
    /// (the dedup/serialization policy lives in the worker, not here).
    /// `status`, when present, registers the job in `active_jobs` for the
    /// duration of its run — with `cx.notify()` on both transitions — so the
    /// UI can display an in-progress message.
    ///
    /// The returned receiver resolves with the job's output, or `Canceled` if
    /// the worker or the job is dropped.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Unique id for `active_jobs` bookkeeping.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        // A failed send means the worker is gone; the receiver then simply
        // resolves as `Canceled`, so the error is deliberately ignored.
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    // Build the caller's future first, then wrap it with the
                    // active-job bookkeeping below.
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4418
4419 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4420 let Some(git_store) = self.git_store.upgrade() else {
4421 return;
4422 };
4423 let entity = cx.entity();
4424 git_store.update(cx, |git_store, cx| {
4425 let Some((&id, _)) = git_store
4426 .repositories
4427 .iter()
4428 .find(|(_, handle)| *handle == &entity)
4429 else {
4430 return;
4431 };
4432 git_store.active_repo_id = Some(id);
4433 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4434 });
4435 }
4436
4437 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4438 self.snapshot.status()
4439 }
4440
4441 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4442 self.snapshot.diff_stat_for_path(path)
4443 }
4444
4445 pub fn cached_stash(&self) -> GitStash {
4446 self.snapshot.stash_entries.clone()
4447 }
4448
4449 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4450 let git_store = self.git_store.upgrade()?;
4451 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4452 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4453 let abs_path = SanitizedPath::new(&abs_path);
4454 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4455 Some(ProjectPath {
4456 worktree_id: worktree.read(cx).id(),
4457 path: relative_path,
4458 })
4459 }
4460
4461 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4462 let git_store = self.git_store.upgrade()?;
4463 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4464 let abs_path = worktree_store.absolutize(path, cx)?;
4465 self.snapshot.abs_path_to_repo_path(&abs_path)
4466 }
4467
4468 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4469 other
4470 .read(cx)
4471 .snapshot
4472 .work_directory_abs_path
4473 .starts_with(&self.snapshot.work_directory_abs_path)
4474 }
4475
4476 pub fn open_commit_buffer(
4477 &mut self,
4478 languages: Option<Arc<LanguageRegistry>>,
4479 buffer_store: Entity<BufferStore>,
4480 cx: &mut Context<Self>,
4481 ) -> Task<Result<Entity<Buffer>>> {
4482 let id = self.id;
4483 if let Some(buffer) = self.commit_message_buffer.clone() {
4484 return Task::ready(Ok(buffer));
4485 }
4486 let this = cx.weak_entity();
4487
4488 let rx = self.send_job(None, move |state, mut cx| async move {
4489 let Some(this) = this.upgrade() else {
4490 bail!("git store was dropped");
4491 };
4492 match state {
4493 RepositoryState::Local(..) => {
4494 this.update(&mut cx, |_, cx| {
4495 Self::open_local_commit_buffer(languages, buffer_store, cx)
4496 })
4497 .await
4498 }
4499 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4500 let request = client.request(proto::OpenCommitMessageBuffer {
4501 project_id: project_id.0,
4502 repository_id: id.to_proto(),
4503 });
4504 let response = request.await.context("requesting to open commit buffer")?;
4505 let buffer_id = BufferId::new(response.buffer_id)?;
4506 let buffer = buffer_store
4507 .update(&mut cx, |buffer_store, cx| {
4508 buffer_store.wait_for_remote_buffer(buffer_id, cx)
4509 })
4510 .await?;
4511 if let Some(language_registry) = languages {
4512 let git_commit_language =
4513 language_registry.language_for_name("Git Commit").await?;
4514 buffer.update(&mut cx, |buffer, cx| {
4515 buffer.set_language(Some(git_commit_language), cx);
4516 });
4517 }
4518 this.update(&mut cx, |this, _| {
4519 this.commit_message_buffer = Some(buffer.clone());
4520 });
4521 Ok(buffer)
4522 }
4523 }
4524 });
4525
4526 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
4527 }
4528
4529 fn open_local_commit_buffer(
4530 language_registry: Option<Arc<LanguageRegistry>>,
4531 buffer_store: Entity<BufferStore>,
4532 cx: &mut Context<Self>,
4533 ) -> Task<Result<Entity<Buffer>>> {
4534 cx.spawn(async move |repository, cx| {
4535 let git_commit_language = match language_registry {
4536 Some(language_registry) => {
4537 Some(language_registry.language_for_name("Git Commit").await?)
4538 }
4539 None => None,
4540 };
4541 let buffer = buffer_store
4542 .update(cx, |buffer_store, cx| {
4543 buffer_store.create_buffer(git_commit_language, false, cx)
4544 })
4545 .await?;
4546
4547 repository.update(cx, |repository, _| {
4548 repository.commit_message_buffer = Some(buffer.clone());
4549 })?;
4550 Ok(buffer)
4551 })
4552 }
4553
    /// Restores `paths` to their contents at `commit`
    /// (`git checkout <commit> -- <paths>`).
    ///
    /// While the job is in flight, the paths are tracked as pending
    /// `GitStatus::Reverted` operations so the UI can reflect the revert.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                // Local: run the checkout through the backend.
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote: forward the request over RPC.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4608
    /// Resets HEAD to `commit` with the given mode
    /// (`git reset --soft|--mixed`). The exhaustive match below implies
    /// `ResetMode` currently has exactly these two variants.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            // Translate the reset mode to its proto encoding.
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4642
    /// Loads the metadata (author, message, timestamp) of a single commit.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    // Rebuild the strongly-typed details from the proto reply.
                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4670
    /// Loads the full diff introduced by `commit` (old and new text per file).
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    // Fails as a whole if any file path in the reply is malformed.
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4706
    /// Loads the complete commit history touching `path` — equivalent to
    /// `file_history_paginated` with no offset and no limit.
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4713
    /// Loads a page of the commit history touching `path`, skipping the first
    /// `skip` commits and returning at most `limit` entries (all remaining
    /// entries when `limit` is `None`).
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        // Use the path echoed back by the server, which may be
                        // normalized relative to what was sent.
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4755
4756 pub fn get_graph_data(
4757 &self,
4758 log_source: LogSource,
4759 log_order: LogOrder,
4760 ) -> Option<&InitialGitGraphData> {
4761 self.initial_graph_data.get(&(log_source, log_order))
4762 }
4763
4764 pub fn search_commits(
4765 &mut self,
4766 log_source: LogSource,
4767 search_args: SearchCommitArgs,
4768 request_tx: smol::channel::Sender<Oid>,
4769 cx: &mut Context<Self>,
4770 ) {
4771 let repository_state = self.repository_state.clone();
4772
4773 cx.background_spawn(async move {
4774 let repo_state = repository_state.await;
4775
4776 match repo_state {
4777 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4778 backend
4779 .search_commits(log_source, search_args, request_tx)
4780 .await
4781 .log_err();
4782 }
4783 Ok(RepositoryState::Remote(_)) => {}
4784 Err(_) => {}
4785 };
4786 })
4787 .detach();
4788 }
4789
    /// Returns the commits in `range` from the cached graph data for
    /// (`log_source`, `log_order`), starting a background fetch for that pair
    /// on first access.
    ///
    /// `is_loading` stays true until the fetch task finishes; `error` carries
    /// any fetch failure. The requested range is clamped to the data loaded so
    /// far, so callers may safely ask for ranges beyond the current end.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // The fetch runs once per (source, order); dropping the map
                // entry drops — and thereby cancels — this task.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record failures on the cached entry so the UI can show them.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp to the data loaded so far. `max_start` keeps the start at the
        // last valid index while `max_end` allows the one-past-the-end
        // position, so an entirely out-of-range request yields at most the
        // final loaded commit.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4859
    /// Streams the initial commit-graph data for a local repository.
    ///
    /// The backend produces batches of commit data over a channel from a
    /// background task; each batch is appended to the cached
    /// `initial_graph_data` entry (maintaining the OID→index map for lookups),
    /// emitting a `CountUpdated` event per batch so views can grow their lists
    /// incrementally.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches until the backend drops its sender.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                // The map entry owns this task, so a vacant entry here means
                // the task outlived its entry — a logic error.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Surface any error reported by the backend task itself.
        task.await?;
        Ok(())
    }
4913
    /// Returns the load state of the full data for commit `sha`, kicking off a
    /// background request on first access.
    ///
    /// When the commit-data handler is open, the request is queued
    /// immediately. When it is closed, this call only (re)opens the handler —
    /// the request for `sha` itself is not sent until a subsequent call — so
    /// `Loading` is returned in the meantime.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only record `Loading` if the request was actually queued;
                    // a send failure means the handler is shutting down.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        // Absent entries (e.g. handler still starting) read as `Loading`.
        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4934
    /// Opens the lazily-started commit-data pipeline.
    ///
    /// Two tasks are spawned:
    /// - a background task that owns the backend's commit-data reader,
    ///   servicing OIDs from the request channel and shutting down after ten
    ///   seconds without a request;
    /// - a foreground task that stores each result in `commit_data` and flips
    ///   the handler back to `Closed` once the background side hangs up.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Results are bounded to apply backpressure; requests are unbounded.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // The reader timed out or failed; mark the handler closed so a
            // later `fetch_commit_data` call can reopen it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // A fresh timer each iteration: this is an idle timeout
                // between requests, not a deadline for the whole loop.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Skip unreadable commits but keep serving others.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Hanging up the result channel lets the foreground task exit its
            // loop and mark the handler `Closed`.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
5025
5026 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
5027 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
5028 }
5029
5030 fn save_buffers<'a>(
5031 &self,
5032 entries: impl IntoIterator<Item = &'a RepoPath>,
5033 cx: &mut Context<Self>,
5034 ) -> Vec<Task<anyhow::Result<()>>> {
5035 let mut save_futures = Vec::new();
5036 if let Some(buffer_store) = self.buffer_store(cx) {
5037 buffer_store.update(cx, |buffer_store, cx| {
5038 for path in entries {
5039 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
5040 continue;
5041 };
5042 if let Some(buffer) = buffer_store.get_by_path(&project_path)
5043 && buffer
5044 .read(cx)
5045 .file()
5046 .is_some_and(|file| file.disk_state().exists())
5047 && buffer.read(cx).has_unsaved_edits()
5048 {
5049 save_futures.push(buffer_store.save_buffer(buffer, cx));
5050 }
5051 }
5052 })
5053 }
5054 save_futures
5055 }
5056
5057 pub fn stage_entries(
5058 &mut self,
5059 entries: Vec<RepoPath>,
5060 cx: &mut Context<Self>,
5061 ) -> Task<anyhow::Result<()>> {
5062 self.stage_or_unstage_entries(true, entries, cx)
5063 }
5064
5065 pub fn unstage_entries(
5066 &mut self,
5067 entries: Vec<RepoPath>,
5068 cx: &mut Context<Self>,
5069 ) -> Task<anyhow::Result<()>> {
5070 self.stage_or_unstage_entries(false, entries, cx)
5071 }
5072
    /// Stages (`git add`) or unstages (`git reset`) the given paths.
    ///
    /// Dirty open buffers for the affected paths are saved first. The index
    /// write runs as a keyed job (`GitJobKey::WriteIndex`) and, while it is
    /// pending, the paths are tracked as `Staged`/`Unstaged` pending ops.
    /// Open uncommitted diffs are optimistically updated (all hunks staged or
    /// unstaged) before the git call, and their pending hunks are cleared
    /// again if the call fails.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status message, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush unsaved edits so the index write sees current contents.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically mark every hunk of each open
                            // uncommitted diff as (un)staged, recording each
                            // diff's bumped operation count so the write below
                            // can be attributed to it — or rolled back.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write, locally or via RPC.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Commit or roll back the optimistic hunk updates.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5257
    /// Stages every entry that isn't already fully staged or currently being
    /// staged.
    ///
    /// Pending-op state takes precedence over the scanned snapshot status:
    /// entries with an in-flight or completed staging op are skipped even if
    /// the snapshot hasn't caught up yet. Filtering runs on a background
    /// thread over clones of the snapshot and pending-ops map.
    pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let snapshot = self.snapshot.clone();
        let pending_ops = self.pending_ops.clone();
        let to_stage = cx.background_spawn(async move {
            snapshot
                .status()
                .filter_map(|entry| {
                    if let Some(ops) =
                        pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
                    {
                        if ops.staging() || ops.staged() {
                            None
                        } else {
                            Some(entry.repo_path)
                        }
                    } else if entry.status.staging().is_fully_staged() {
                        None
                    } else {
                        Some(entry.repo_path)
                    }
                })
                .collect()
        });

        cx.spawn(async move |this, cx| {
            let to_stage = to_stage.await;
            this.update(cx, |this, cx| {
                this.stage_or_unstage_entries(true, to_stage, cx)
            })?
            .await
        })
    }
5290
5291 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5292 let snapshot = self.snapshot.clone();
5293 let pending_ops = self.pending_ops.clone();
5294 let to_unstage = cx.background_spawn(async move {
5295 snapshot
5296 .status()
5297 .filter_map(|entry| {
5298 if let Some(ops) =
5299 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5300 {
5301 if !ops.staging() && !ops.staged() {
5302 None
5303 } else {
5304 Some(entry.repo_path)
5305 }
5306 } else if entry.status.staging().is_fully_unstaged() {
5307 None
5308 } else {
5309 Some(entry.repo_path)
5310 }
5311 })
5312 .collect()
5313 });
5314
5315 cx.spawn(async move |this, cx| {
5316 let to_unstage = to_unstage.await;
5317 this.update(cx, |this, cx| {
5318 this.stage_or_unstage_entries(false, to_unstage, cx)
5319 })?
5320 .await
5321 })
5322 }
5323
5324 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5325 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5326
5327 self.stash_entries(to_stash, cx)
5328 }
5329
5330 pub fn stash_entries(
5331 &mut self,
5332 entries: Vec<RepoPath>,
5333 cx: &mut Context<Self>,
5334 ) -> Task<anyhow::Result<()>> {
5335 let id = self.id;
5336
5337 cx.spawn(async move |this, cx| {
5338 this.update(cx, |this, _| {
5339 this.send_job(None, move |git_repo, _cx| async move {
5340 match git_repo {
5341 RepositoryState::Local(LocalRepositoryState {
5342 backend,
5343 environment,
5344 ..
5345 }) => backend.stash_paths(entries, environment).await,
5346 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5347 client
5348 .request(proto::Stash {
5349 project_id: project_id.0,
5350 repository_id: id.to_proto(),
5351 paths: entries
5352 .into_iter()
5353 .map(|repo_path| repo_path.to_proto())
5354 .collect(),
5355 })
5356 .await?;
5357 Ok(())
5358 }
5359 }
5360 })
5361 })?
5362 .await??;
5363 Ok(())
5364 })
5365 }
5366
5367 pub fn stash_pop(
5368 &mut self,
5369 index: Option<usize>,
5370 cx: &mut Context<Self>,
5371 ) -> Task<anyhow::Result<()>> {
5372 let id = self.id;
5373 cx.spawn(async move |this, cx| {
5374 this.update(cx, |this, _| {
5375 this.send_job(None, move |git_repo, _cx| async move {
5376 match git_repo {
5377 RepositoryState::Local(LocalRepositoryState {
5378 backend,
5379 environment,
5380 ..
5381 }) => backend.stash_pop(index, environment).await,
5382 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5383 client
5384 .request(proto::StashPop {
5385 project_id: project_id.0,
5386 repository_id: id.to_proto(),
5387 stash_index: index.map(|i| i as u64),
5388 })
5389 .await
5390 .context("sending stash pop request")?;
5391 Ok(())
5392 }
5393 }
5394 })
5395 })?
5396 .await??;
5397 Ok(())
5398 })
5399 }
5400
5401 pub fn stash_apply(
5402 &mut self,
5403 index: Option<usize>,
5404 cx: &mut Context<Self>,
5405 ) -> Task<anyhow::Result<()>> {
5406 let id = self.id;
5407 cx.spawn(async move |this, cx| {
5408 this.update(cx, |this, _| {
5409 this.send_job(None, move |git_repo, _cx| async move {
5410 match git_repo {
5411 RepositoryState::Local(LocalRepositoryState {
5412 backend,
5413 environment,
5414 ..
5415 }) => backend.stash_apply(index, environment).await,
5416 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5417 client
5418 .request(proto::StashApply {
5419 project_id: project_id.0,
5420 repository_id: id.to_proto(),
5421 stash_index: index.map(|i| i as u64),
5422 })
5423 .await
5424 .context("sending stash apply request")?;
5425 Ok(())
5426 }
5427 }
5428 })
5429 })?
5430 .await??;
5431 Ok(())
5432 })
5433 }
5434
5435 pub fn stash_drop(
5436 &mut self,
5437 index: Option<usize>,
5438 cx: &mut Context<Self>,
5439 ) -> oneshot::Receiver<anyhow::Result<()>> {
5440 let id = self.id;
5441 let updates_tx = self
5442 .git_store()
5443 .and_then(|git_store| match &git_store.read(cx).state {
5444 GitStoreState::Local { downstream, .. } => downstream
5445 .as_ref()
5446 .map(|downstream| downstream.updates_tx.clone()),
5447 _ => None,
5448 });
5449 let this = cx.weak_entity();
5450 self.send_job(None, move |git_repo, mut cx| async move {
5451 match git_repo {
5452 RepositoryState::Local(LocalRepositoryState {
5453 backend,
5454 environment,
5455 ..
5456 }) => {
5457 // TODO would be nice to not have to do this manually
5458 let result = backend.stash_drop(index, environment).await;
5459 if result.is_ok()
5460 && let Ok(stash_entries) = backend.stash_entries().await
5461 {
5462 let snapshot = this.update(&mut cx, |this, cx| {
5463 this.snapshot.stash_entries = stash_entries;
5464 cx.emit(RepositoryEvent::StashEntriesChanged);
5465 this.snapshot.clone()
5466 })?;
5467 if let Some(updates_tx) = updates_tx {
5468 updates_tx
5469 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5470 .ok();
5471 }
5472 }
5473
5474 result
5475 }
5476 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5477 client
5478 .request(proto::StashDrop {
5479 project_id: project_id.0,
5480 repository_id: id.to_proto(),
5481 stash_index: index.map(|i| i as u64),
5482 })
5483 .await
5484 .context("sending stash pop request")?;
5485 Ok(())
5486 }
5487 }
5488 })
5489 }
5490
5491 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5492 let id = self.id;
5493 self.send_job(
5494 Some(format!("git hook {}", hook.as_str()).into()),
5495 move |git_repo, _cx| async move {
5496 match git_repo {
5497 RepositoryState::Local(LocalRepositoryState {
5498 backend,
5499 environment,
5500 ..
5501 }) => backend.run_hook(hook, environment.clone()).await,
5502 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5503 client
5504 .request(proto::RunGitHook {
5505 project_id: project_id.0,
5506 repository_id: id.to_proto(),
5507 hook: hook.to_proto(),
5508 })
5509 .await?;
5510
5511 Ok(())
5512 }
5513 }
5514 },
5515 )
5516 }
5517
    /// Commits the currently staged changes with `message`.
    ///
    /// The pre-commit hook is kicked off immediately; the queued commit job
    /// waits for it to succeed before running `git commit` locally or
    /// proxying the request to the remote host.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        // Fresh id so the host can route credential prompts back to this delegate.
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Start the pre-commit hook before queueing the commit job itself.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // First `?` is channel cancellation, second is the hook's result;
            // the commit is aborted if the pre-commit hook failed.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate for the duration of the
                    // request, and deregister it no matter how it ends.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5573
5574 pub fn fetch(
5575 &mut self,
5576 fetch_options: FetchOptions,
5577 askpass: AskPassDelegate,
5578 _cx: &mut App,
5579 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5580 let askpass_delegates = self.askpass_delegates.clone();
5581 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5582 let id = self.id;
5583
5584 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
5585 match git_repo {
5586 RepositoryState::Local(LocalRepositoryState {
5587 backend,
5588 environment,
5589 ..
5590 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
5591 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5592 askpass_delegates.lock().insert(askpass_id, askpass);
5593 let _defer = util::defer(|| {
5594 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5595 debug_assert!(askpass_delegate.is_some());
5596 });
5597
5598 let response = client
5599 .request(proto::Fetch {
5600 project_id: project_id.0,
5601 repository_id: id.to_proto(),
5602 askpass_id,
5603 remote: fetch_options.to_proto(),
5604 })
5605 .await?;
5606
5607 Ok(RemoteCommandOutput {
5608 stdout: response.stdout,
5609 stderr: response.stderr,
5610 })
5611 }
5612 }
5613 })
5614 }
5615
5616 pub fn push(
5617 &mut self,
5618 branch: SharedString,
5619 remote_branch: SharedString,
5620 remote: SharedString,
5621 options: Option<PushOptions>,
5622 askpass: AskPassDelegate,
5623 cx: &mut Context<Self>,
5624 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5625 let askpass_delegates = self.askpass_delegates.clone();
5626 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5627 let id = self.id;
5628
5629 let args = options
5630 .map(|option| match option {
5631 PushOptions::SetUpstream => " --set-upstream",
5632 PushOptions::Force => " --force-with-lease",
5633 })
5634 .unwrap_or("");
5635
5636 let updates_tx = self
5637 .git_store()
5638 .and_then(|git_store| match &git_store.read(cx).state {
5639 GitStoreState::Local { downstream, .. } => downstream
5640 .as_ref()
5641 .map(|downstream| downstream.updates_tx.clone()),
5642 _ => None,
5643 });
5644
5645 let this = cx.weak_entity();
5646 self.send_job(
5647 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5648 move |git_repo, mut cx| async move {
5649 match git_repo {
5650 RepositoryState::Local(LocalRepositoryState {
5651 backend,
5652 environment,
5653 ..
5654 }) => {
5655 let result = backend
5656 .push(
5657 branch.to_string(),
5658 remote_branch.to_string(),
5659 remote.to_string(),
5660 options,
5661 askpass,
5662 environment.clone(),
5663 cx.clone(),
5664 )
5665 .await;
5666 // TODO would be nice to not have to do this manually
5667 if result.is_ok() {
5668 let branches = backend.branches().await?;
5669 let branch = branches.into_iter().find(|branch| branch.is_head);
5670 log::info!("head branch after scan is {branch:?}");
5671 let snapshot = this.update(&mut cx, |this, cx| {
5672 this.snapshot.branch = branch;
5673 cx.emit(RepositoryEvent::HeadChanged);
5674 this.snapshot.clone()
5675 })?;
5676 if let Some(updates_tx) = updates_tx {
5677 updates_tx
5678 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5679 .ok();
5680 }
5681 }
5682 result
5683 }
5684 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5685 askpass_delegates.lock().insert(askpass_id, askpass);
5686 let _defer = util::defer(|| {
5687 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5688 debug_assert!(askpass_delegate.is_some());
5689 });
5690 let response = client
5691 .request(proto::Push {
5692 project_id: project_id.0,
5693 repository_id: id.to_proto(),
5694 askpass_id,
5695 branch_name: branch.to_string(),
5696 remote_branch_name: remote_branch.to_string(),
5697 remote_name: remote.to_string(),
5698 options: options.map(|options| match options {
5699 PushOptions::Force => proto::push::PushOptions::Force,
5700 PushOptions::SetUpstream => {
5701 proto::push::PushOptions::SetUpstream
5702 }
5703 }
5704 as i32),
5705 })
5706 .await?;
5707
5708 Ok(RemoteCommandOutput {
5709 stdout: response.stdout,
5710 stderr: response.stderr,
5711 })
5712 }
5713 }
5714 },
5715 )
5716 }
5717
5718 pub fn pull(
5719 &mut self,
5720 branch: Option<SharedString>,
5721 remote: SharedString,
5722 rebase: bool,
5723 askpass: AskPassDelegate,
5724 _cx: &mut App,
5725 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5726 let askpass_delegates = self.askpass_delegates.clone();
5727 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5728 let id = self.id;
5729
5730 let mut status = "git pull".to_string();
5731 if rebase {
5732 status.push_str(" --rebase");
5733 }
5734 status.push_str(&format!(" {}", remote));
5735 if let Some(b) = &branch {
5736 status.push_str(&format!(" {}", b));
5737 }
5738
5739 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5740 match git_repo {
5741 RepositoryState::Local(LocalRepositoryState {
5742 backend,
5743 environment,
5744 ..
5745 }) => {
5746 backend
5747 .pull(
5748 branch.as_ref().map(|b| b.to_string()),
5749 remote.to_string(),
5750 rebase,
5751 askpass,
5752 environment.clone(),
5753 cx,
5754 )
5755 .await
5756 }
5757 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5758 askpass_delegates.lock().insert(askpass_id, askpass);
5759 let _defer = util::defer(|| {
5760 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5761 debug_assert!(askpass_delegate.is_some());
5762 });
5763 let response = client
5764 .request(proto::Pull {
5765 project_id: project_id.0,
5766 repository_id: id.to_proto(),
5767 askpass_id,
5768 rebase,
5769 branch_name: branch.as_ref().map(|b| b.to_string()),
5770 remote_name: remote.to_string(),
5771 })
5772 .await?;
5773
5774 Ok(RemoteCommandOutput {
5775 stdout: response.stdout,
5776 stderr: response.stderr,
5777 })
5778 }
5779 }
5780 })
5781 }
5782
    /// Writes `content` into the git index entry for `path` (or clears it when
    /// `content` is `None`), serialized with other writes to the same path via
    /// `GitJobKey::WriteIndex`.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            // Key by path so concurrent index writes for the same file run in order.
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the file's executable bit in the new index
                        // entry; a missing or unreadable file counts as
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                // After the write lands, record which hunk-staging operation it
                // corresponds to on the buffer's diff state — presumably read
                // by later diff recalculation to tell whether the index already
                // reflects the user's latest hunk toggles (see
                // `hunk_staging_operation_count_as_of_write` elsewhere).
                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            // If the buffer or its diff state no longer exists,
                            // the bookkeeping is silently skipped (the `?`s
                            // short-circuit the inner Option).
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5861
5862 pub fn create_remote(
5863 &mut self,
5864 remote_name: String,
5865 remote_url: String,
5866 ) -> oneshot::Receiver<Result<()>> {
5867 let id = self.id;
5868 self.send_job(
5869 Some(format!("git remote add {remote_name} {remote_url}").into()),
5870 move |repo, _cx| async move {
5871 match repo {
5872 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5873 backend.create_remote(remote_name, remote_url).await
5874 }
5875 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5876 client
5877 .request(proto::GitCreateRemote {
5878 project_id: project_id.0,
5879 repository_id: id.to_proto(),
5880 remote_name,
5881 remote_url,
5882 })
5883 .await?;
5884
5885 Ok(())
5886 }
5887 }
5888 },
5889 )
5890 }
5891
5892 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5893 let id = self.id;
5894 self.send_job(
5895 Some(format!("git remove remote {remote_name}").into()),
5896 move |repo, _cx| async move {
5897 match repo {
5898 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5899 backend.remove_remote(remote_name).await
5900 }
5901 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5902 client
5903 .request(proto::GitRemoveRemote {
5904 project_id: project_id.0,
5905 repository_id: id.to_proto(),
5906 remote_name,
5907 })
5908 .await?;
5909
5910 Ok(())
5911 }
5912 }
5913 },
5914 )
5915 }
5916
5917 pub fn get_remotes(
5918 &mut self,
5919 branch_name: Option<String>,
5920 is_push: bool,
5921 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5922 let id = self.id;
5923 self.send_job(None, move |repo, _cx| async move {
5924 match repo {
5925 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5926 let remote = if let Some(branch_name) = branch_name {
5927 if is_push {
5928 backend.get_push_remote(branch_name).await?
5929 } else {
5930 backend.get_branch_remote(branch_name).await?
5931 }
5932 } else {
5933 None
5934 };
5935
5936 match remote {
5937 Some(remote) => Ok(vec![remote]),
5938 None => backend.get_all_remotes().await,
5939 }
5940 }
5941 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5942 let response = client
5943 .request(proto::GetRemotes {
5944 project_id: project_id.0,
5945 repository_id: id.to_proto(),
5946 branch_name,
5947 is_push,
5948 })
5949 .await?;
5950
5951 let remotes = response
5952 .remotes
5953 .into_iter()
5954 .map(|remotes| Remote {
5955 name: remotes.name.into(),
5956 })
5957 .collect();
5958
5959 Ok(remotes)
5960 }
5961 }
5962 })
5963 }
5964
5965 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5966 let id = self.id;
5967 self.send_job(None, move |repo, _| async move {
5968 match repo {
5969 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5970 backend.branches().await
5971 }
5972 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5973 let response = client
5974 .request(proto::GitGetBranches {
5975 project_id: project_id.0,
5976 repository_id: id.to_proto(),
5977 })
5978 .await?;
5979
5980 let branches = response
5981 .branches
5982 .into_iter()
5983 .map(|branch| proto_to_branch(&branch))
5984 .collect();
5985
5986 Ok(branches)
5987 }
5988 }
5989 })
5990 }
5991
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout (i.e. the working directory
    /// is the same as the original repository path).
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
6003
6004 pub fn path_for_new_linked_worktree(
6005 &self,
6006 branch_name: &str,
6007 worktree_directory_setting: &str,
6008 ) -> Result<PathBuf> {
6009 let original_repo = self.original_repo_abs_path.clone();
6010 let project_name = original_repo
6011 .file_name()
6012 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
6013 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
6014 Ok(directory.join(branch_name).join(project_name))
6015 }
6016
6017 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
6018 let id = self.id;
6019 self.send_job(None, move |repo, _| async move {
6020 match repo {
6021 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6022 backend.worktrees().await
6023 }
6024 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6025 let response = client
6026 .request(proto::GitGetWorktrees {
6027 project_id: project_id.0,
6028 repository_id: id.to_proto(),
6029 })
6030 .await?;
6031
6032 let worktrees = response
6033 .worktrees
6034 .into_iter()
6035 .map(|worktree| proto_to_worktree(&worktree))
6036 .collect();
6037
6038 Ok(worktrees)
6039 }
6040 }
6041 })
6042 }
6043
6044 pub fn create_worktree(
6045 &mut self,
6046 target: CreateWorktreeTarget,
6047 path: PathBuf,
6048 ) -> oneshot::Receiver<Result<()>> {
6049 let id = self.id;
6050 let job_description = match target.branch_name() {
6051 Some(branch_name) => format!("git worktree add: {branch_name}"),
6052 None => "git worktree add (detached)".to_string(),
6053 };
6054 self.send_job(Some(job_description.into()), move |repo, _cx| async move {
6055 match repo {
6056 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6057 backend.create_worktree(target, path).await
6058 }
6059 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6060 let (name, commit, use_existing_branch) = match target {
6061 CreateWorktreeTarget::ExistingBranch { branch_name } => {
6062 (Some(branch_name), None, true)
6063 }
6064 CreateWorktreeTarget::NewBranch {
6065 branch_name,
6066 base_sha,
6067 } => (Some(branch_name), base_sha, false),
6068 CreateWorktreeTarget::Detached { base_sha } => (None, base_sha, false),
6069 };
6070
6071 client
6072 .request(proto::GitCreateWorktree {
6073 project_id: project_id.0,
6074 repository_id: id.to_proto(),
6075 name: name.unwrap_or_default(),
6076 directory: path.to_string_lossy().to_string(),
6077 commit,
6078 use_existing_branch,
6079 })
6080 .await?;
6081
6082 Ok(())
6083 }
6084 }
6085 })
6086 }
6087
6088 pub fn create_worktree_detached(
6089 &mut self,
6090 path: PathBuf,
6091 commit: String,
6092 ) -> oneshot::Receiver<Result<()>> {
6093 self.create_worktree(
6094 CreateWorktreeTarget::Detached {
6095 base_sha: Some(commit),
6096 },
6097 path,
6098 )
6099 }
6100
6101 pub fn checkout_branch_in_worktree(
6102 &mut self,
6103 branch_name: String,
6104 worktree_path: PathBuf,
6105 create: bool,
6106 ) -> oneshot::Receiver<Result<()>> {
6107 let description = if create {
6108 format!("git checkout -b {branch_name}")
6109 } else {
6110 format!("git checkout {branch_name}")
6111 };
6112 self.send_job(Some(description.into()), move |repo, _cx| async move {
6113 match repo {
6114 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6115 backend
6116 .checkout_branch_in_worktree(branch_name, worktree_path, create)
6117 .await
6118 }
6119 RepositoryState::Remote(_) => {
6120 log::warn!("checkout_branch_in_worktree not supported for remote repositories");
6121 Ok(())
6122 }
6123 }
6124 })
6125 }
6126
6127 pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
6128 let id = self.id;
6129 self.send_job(None, move |repo, _cx| async move {
6130 match repo {
6131 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6132 Ok(backend.head_sha().await)
6133 }
6134 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6135 let response = client
6136 .request(proto::GitGetHeadSha {
6137 project_id: project_id.0,
6138 repository_id: id.to_proto(),
6139 })
6140 .await?;
6141
6142 Ok(response.sha)
6143 }
6144 }
6145 })
6146 }
6147
6148 pub fn update_ref(
6149 &mut self,
6150 ref_name: String,
6151 commit: String,
6152 ) -> oneshot::Receiver<Result<()>> {
6153 self.send_job(None, move |repo, _cx| async move {
6154 match repo {
6155 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6156 backend.update_ref(ref_name, commit).await
6157 }
6158 RepositoryState::Remote(_) => {
6159 anyhow::bail!("update_ref is not supported for remote repositories")
6160 }
6161 }
6162 })
6163 }
6164
6165 pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
6166 self.send_job(None, move |repo, _cx| async move {
6167 match repo {
6168 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6169 backend.delete_ref(ref_name).await
6170 }
6171 RepositoryState::Remote(_) => {
6172 anyhow::bail!("delete_ref is not supported for remote repositories")
6173 }
6174 }
6175 })
6176 }
6177
6178 pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
6179 self.send_job(None, move |repo, _cx| async move {
6180 match repo {
6181 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6182 backend.repair_worktrees().await
6183 }
6184 RepositoryState::Remote(_) => {
6185 anyhow::bail!("repair_worktrees is not supported for remote repositories")
6186 }
6187 }
6188 })
6189 }
6190
6191 pub fn create_archive_checkpoint(&mut self) -> oneshot::Receiver<Result<(String, String)>> {
6192 self.send_job(None, move |repo, _cx| async move {
6193 match repo {
6194 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6195 backend.create_archive_checkpoint().await
6196 }
6197 RepositoryState::Remote(_) => {
6198 anyhow::bail!(
6199 "create_archive_checkpoint is not supported for remote repositories"
6200 )
6201 }
6202 }
6203 })
6204 }
6205
6206 pub fn restore_archive_checkpoint(
6207 &mut self,
6208 staged_sha: String,
6209 unstaged_sha: String,
6210 ) -> oneshot::Receiver<Result<()>> {
6211 self.send_job(None, move |repo, _cx| async move {
6212 match repo {
6213 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6214 backend
6215 .restore_archive_checkpoint(staged_sha, unstaged_sha)
6216 .await
6217 }
6218 RepositoryState::Remote(_) => {
6219 anyhow::bail!(
6220 "restore_archive_checkpoint is not supported for remote repositories"
6221 )
6222 }
6223 }
6224 })
6225 }
6226
6227 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
6228 let id = self.id;
6229 self.send_job(
6230 Some(format!("git worktree remove: {}", path.display()).into()),
6231 move |repo, _cx| async move {
6232 match repo {
6233 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6234 backend.remove_worktree(path, force).await
6235 }
6236 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6237 client
6238 .request(proto::GitRemoveWorktree {
6239 project_id: project_id.0,
6240 repository_id: id.to_proto(),
6241 path: path.to_string_lossy().to_string(),
6242 force,
6243 })
6244 .await?;
6245
6246 Ok(())
6247 }
6248 }
6249 },
6250 )
6251 }
6252
6253 pub fn rename_worktree(
6254 &mut self,
6255 old_path: PathBuf,
6256 new_path: PathBuf,
6257 ) -> oneshot::Receiver<Result<()>> {
6258 let id = self.id;
6259 self.send_job(
6260 Some(format!("git worktree move: {}", old_path.display()).into()),
6261 move |repo, _cx| async move {
6262 match repo {
6263 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6264 backend.rename_worktree(old_path, new_path).await
6265 }
6266 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6267 client
6268 .request(proto::GitRenameWorktree {
6269 project_id: project_id.0,
6270 repository_id: id.to_proto(),
6271 old_path: old_path.to_string_lossy().to_string(),
6272 new_path: new_path.to_string_lossy().to_string(),
6273 })
6274 .await?;
6275
6276 Ok(())
6277 }
6278 }
6279 },
6280 )
6281 }
6282
6283 pub fn default_branch(
6284 &mut self,
6285 include_remote_name: bool,
6286 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
6287 let id = self.id;
6288 self.send_job(None, move |repo, _| async move {
6289 match repo {
6290 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6291 backend.default_branch(include_remote_name).await
6292 }
6293 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6294 let response = client
6295 .request(proto::GetDefaultBranch {
6296 project_id: project_id.0,
6297 repository_id: id.to_proto(),
6298 })
6299 .await?;
6300
6301 anyhow::Ok(response.branch.map(SharedString::from))
6302 }
6303 }
6304 })
6305 }
6306
    /// Computes a tree-level diff (per-path statuses, no hunk content) of the
    /// kind described by `diff_type`.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Decode the wire entries. Any entry whose oid or path
                    // fails to parse is logged (via `log_err`) and dropped
                    // from the result rather than failing the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                // Modified/Deleted both carry the pre-change
                                // blob oid of the file.
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6366
6367 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6368 let id = self.id;
6369 self.send_job(None, move |repo, _cx| async move {
6370 match repo {
6371 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6372 backend.diff(diff_type).await
6373 }
6374 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6375 let (proto_diff_type, merge_base_ref) = match &diff_type {
6376 DiffType::HeadToIndex => {
6377 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6378 }
6379 DiffType::HeadToWorktree => {
6380 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6381 }
6382 DiffType::MergeBase { base_ref } => (
6383 proto::git_diff::DiffType::MergeBase.into(),
6384 Some(base_ref.to_string()),
6385 ),
6386 };
6387 let response = client
6388 .request(proto::GitDiff {
6389 project_id: project_id.0,
6390 repository_id: id.to_proto(),
6391 diff_type: proto_diff_type,
6392 merge_base_ref,
6393 })
6394 .await?;
6395
6396 Ok(response.diff)
6397 }
6398 }
6399 })
6400 }
6401
6402 pub fn create_branch(
6403 &mut self,
6404 branch_name: String,
6405 base_branch: Option<String>,
6406 ) -> oneshot::Receiver<Result<()>> {
6407 let id = self.id;
6408 let status_msg = if let Some(ref base) = base_branch {
6409 format!("git switch -c {branch_name} {base}").into()
6410 } else {
6411 format!("git switch -c {branch_name}").into()
6412 };
6413 self.send_job(Some(status_msg), move |repo, _cx| async move {
6414 match repo {
6415 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6416 backend.create_branch(branch_name, base_branch).await
6417 }
6418 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6419 client
6420 .request(proto::GitCreateBranch {
6421 project_id: project_id.0,
6422 repository_id: id.to_proto(),
6423 branch_name,
6424 })
6425 .await?;
6426
6427 Ok(())
6428 }
6429 }
6430 })
6431 }
6432
6433 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6434 let id = self.id;
6435 self.send_job(
6436 Some(format!("git switch {branch_name}").into()),
6437 move |repo, _cx| async move {
6438 match repo {
6439 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6440 backend.change_branch(branch_name).await
6441 }
6442 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6443 client
6444 .request(proto::GitChangeBranch {
6445 project_id: project_id.0,
6446 repository_id: id.to_proto(),
6447 branch_name,
6448 })
6449 .await?;
6450
6451 Ok(())
6452 }
6453 }
6454 },
6455 )
6456 }
6457
6458 pub fn delete_branch(
6459 &mut self,
6460 is_remote: bool,
6461 branch_name: String,
6462 ) -> oneshot::Receiver<Result<()>> {
6463 let id = self.id;
6464 self.send_job(
6465 Some(
6466 format!(
6467 "git branch {} {}",
6468 if is_remote { "-dr" } else { "-d" },
6469 branch_name
6470 )
6471 .into(),
6472 ),
6473 move |repo, _cx| async move {
6474 match repo {
6475 RepositoryState::Local(state) => {
6476 state.backend.delete_branch(is_remote, branch_name).await
6477 }
6478 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6479 client
6480 .request(proto::GitDeleteBranch {
6481 project_id: project_id.0,
6482 repository_id: id.to_proto(),
6483 is_remote,
6484 branch_name,
6485 })
6486 .await?;
6487
6488 Ok(())
6489 }
6490 }
6491 },
6492 )
6493 }
6494
6495 pub fn rename_branch(
6496 &mut self,
6497 branch: String,
6498 new_name: String,
6499 ) -> oneshot::Receiver<Result<()>> {
6500 let id = self.id;
6501 self.send_job(
6502 Some(format!("git branch -m {branch} {new_name}").into()),
6503 move |repo, _cx| async move {
6504 match repo {
6505 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6506 backend.rename_branch(branch, new_name).await
6507 }
6508 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6509 client
6510 .request(proto::GitRenameBranch {
6511 project_id: project_id.0,
6512 repository_id: id.to_proto(),
6513 branch,
6514 new_name,
6515 })
6516 .await?;
6517
6518 Ok(())
6519 }
6520 }
6521 },
6522 )
6523 }
6524
6525 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6526 let id = self.id;
6527 self.send_job(None, move |repo, _cx| async move {
6528 match repo {
6529 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6530 backend.check_for_pushed_commit().await
6531 }
6532 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6533 let response = client
6534 .request(proto::CheckForPushedCommits {
6535 project_id: project_id.0,
6536 repository_id: id.to_proto(),
6537 })
6538 .await?;
6539
6540 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6541
6542 Ok(branches)
6543 }
6544 }
6545 })
6546 }
6547
6548 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6549 let id = self.id;
6550 self.send_job(None, move |repo, _cx| async move {
6551 match repo {
6552 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6553 backend.checkpoint().await
6554 }
6555 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6556 let response = client
6557 .request(proto::GitCreateCheckpoint {
6558 project_id: project_id.0,
6559 repository_id: id.to_proto(),
6560 })
6561 .await?;
6562
6563 Ok(GitRepositoryCheckpoint {
6564 commit_sha: Oid::from_bytes(&response.commit_sha)?,
6565 })
6566 }
6567 }
6568 })
6569 }
6570
6571 pub fn restore_checkpoint(
6572 &mut self,
6573 checkpoint: GitRepositoryCheckpoint,
6574 ) -> oneshot::Receiver<Result<()>> {
6575 let id = self.id;
6576 self.send_job(None, move |repo, _cx| async move {
6577 match repo {
6578 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6579 backend.restore_checkpoint(checkpoint).await
6580 }
6581 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6582 client
6583 .request(proto::GitRestoreCheckpoint {
6584 project_id: project_id.0,
6585 repository_id: id.to_proto(),
6586 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6587 })
6588 .await?;
6589 Ok(())
6590 }
6591 }
6592 })
6593 }
6594
    /// Applies a repository state update streamed from the upstream (host)
    /// project to this downstream snapshot, emitting a change event for each
    /// part of the state that actually differs.
    ///
    /// Updates may be split across multiple messages; `scan_id` is only
    /// adopted once the final message of a batch arrives (`is_last_update`).
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch and head commit: emit HeadChanged only when either differs.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts (so each
        // conflicted path maps to an empty merge-head list here).
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries: entries that fail to deserialize are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Build one batched edit for the status tree: removals first, then
        // upserts; entries that fail to decode are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6678
6679 pub fn compare_checkpoints(
6680 &mut self,
6681 left: GitRepositoryCheckpoint,
6682 right: GitRepositoryCheckpoint,
6683 ) -> oneshot::Receiver<Result<bool>> {
6684 let id = self.id;
6685 self.send_job(None, move |repo, _cx| async move {
6686 match repo {
6687 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6688 backend.compare_checkpoints(left, right).await
6689 }
6690 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6691 let response = client
6692 .request(proto::GitCompareCheckpoints {
6693 project_id: project_id.0,
6694 repository_id: id.to_proto(),
6695 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6696 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6697 })
6698 .await?;
6699 Ok(response.equal)
6700 }
6701 }
6702 })
6703 }
6704
6705 pub fn diff_checkpoints(
6706 &mut self,
6707 base_checkpoint: GitRepositoryCheckpoint,
6708 target_checkpoint: GitRepositoryCheckpoint,
6709 ) -> oneshot::Receiver<Result<String>> {
6710 let id = self.id;
6711 self.send_job(None, move |repo, _cx| async move {
6712 match repo {
6713 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6714 backend
6715 .diff_checkpoints(base_checkpoint, target_checkpoint)
6716 .await
6717 }
6718 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6719 let response = client
6720 .request(proto::GitDiffCheckpoints {
6721 project_id: project_id.0,
6722 repository_id: id.to_proto(),
6723 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
6724 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
6725 })
6726 .await?;
6727 Ok(response.diff)
6728 }
6729 }
6730 })
6731 }
6732
    /// Drops all pending ops that are no longer running, keeping only
    /// in-flight entries, and emits `PendingOpsChanged` if anything changed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree retaining only ops still running; paths left with
        // no running ops are removed entirely.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *previous* ops (cloned before
            // the assignment below) rather than `updated` — confirm listeners
            // expect the pre-clear state.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6758
    /// Schedules a full git state rescan as a keyed job, so repeated calls
    /// collapse into a single scan. The resulting snapshot is forwarded on
    /// `updates_tx` (if provided) for downstream collaborators.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been dropped in the meantime;
                // a scan on a dead entity is a no-op, not an error.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // A completed scan supersedes any finished pending ops.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6790
    /// Spawns the background worker that executes git jobs for a local
    /// repository, returning the channel used to enqueue jobs.
    ///
    /// Jobs run strictly one at a time, in FIFO order. A job carrying a
    /// `GitJobKey` is skipped when a newer job with the same key is queued
    /// behind it, so only the most recent keyed job actually runs.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so the keyed-job
                // deduplication below can see the full backlog.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job if a newer job with the same key is
                    // still waiting; only the latest one needs to run.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6836
    /// Spawns the background worker that executes git jobs against a remote
    /// (host-side) repository, returning the channel used to enqueue jobs.
    ///
    /// Same queueing discipline as the local worker: strictly serial, FIFO,
    /// with a keyed job skipped when a newer job with the same key is queued.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so the keyed-job
                // deduplication below can see the full backlog.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job if a newer job with the same key is
                    // still waiting; only the latest one needs to run.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6872
6873 fn load_staged_text(
6874 &mut self,
6875 buffer_id: BufferId,
6876 repo_path: RepoPath,
6877 cx: &App,
6878 ) -> Task<Result<Option<String>>> {
6879 let rx = self.send_job(None, move |state, _| async move {
6880 match state {
6881 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6882 anyhow::Ok(backend.load_index_text(repo_path).await)
6883 }
6884 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6885 let response = client
6886 .request(proto::OpenUnstagedDiff {
6887 project_id: project_id.to_proto(),
6888 buffer_id: buffer_id.to_proto(),
6889 })
6890 .await?;
6891 Ok(response.staged_text)
6892 }
6893 }
6894 });
6895 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6896 }
6897
6898 fn load_committed_text(
6899 &mut self,
6900 buffer_id: BufferId,
6901 repo_path: RepoPath,
6902 cx: &App,
6903 ) -> Task<Result<DiffBasesChange>> {
6904 let rx = self.send_job(None, move |state, _| async move {
6905 match state {
6906 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6907 let committed_text = backend.load_committed_text(repo_path.clone()).await;
6908 let staged_text = backend.load_index_text(repo_path).await;
6909 let diff_bases_change = if committed_text == staged_text {
6910 DiffBasesChange::SetBoth(committed_text)
6911 } else {
6912 DiffBasesChange::SetEach {
6913 index: staged_text,
6914 head: committed_text,
6915 }
6916 };
6917 anyhow::Ok(diff_bases_change)
6918 }
6919 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6920 use proto::open_uncommitted_diff_response::Mode;
6921
6922 let response = client
6923 .request(proto::OpenUncommittedDiff {
6924 project_id: project_id.to_proto(),
6925 buffer_id: buffer_id.to_proto(),
6926 })
6927 .await?;
6928 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
6929 let bases = match mode {
6930 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
6931 Mode::IndexAndHead => DiffBasesChange::SetEach {
6932 head: response.committed_text,
6933 index: response.staged_text,
6934 },
6935 };
6936 Ok(bases)
6937 }
6938 }
6939 });
6940
6941 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6942 }
6943
6944 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6945 let repository_id = self.snapshot.id;
6946 let rx = self.send_job(None, move |state, _| async move {
6947 match state {
6948 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6949 backend.load_blob_content(oid).await
6950 }
6951 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6952 let response = client
6953 .request(proto::GetBlobContent {
6954 project_id: project_id.to_proto(),
6955 repository_id: repository_id.0,
6956 oid: oid.to_string(),
6957 })
6958 .await?;
6959 Ok(response.content)
6960 }
6961 }
6962 });
6963 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6964 }
6965
    /// Queues a status refresh for the given paths. Changed paths accumulate
    /// in `paths_needing_status_update` and are processed by a single keyed
    /// job, so bursts of file events coalesce into one git status pass.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of every accumulated path batch; a later call
                // to `paths_changed` enqueues a fresh job for new paths.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                // With no HEAD commit (e.g. a fresh repository) there is
                // nothing to diff against, so skip the diff-stat query.
                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        // Compare fresh statuses against the previous snapshot
                        // and record only the entries that actually changed.
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            // Entry unchanged from the previous snapshot:
                            // nothing to record.
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths we asked about that git no longer reports a
                        // status for must be removed from the tree.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
7078
7079 /// currently running git command and when it started
7080 pub fn current_job(&self) -> Option<JobInfo> {
7081 self.active_jobs.values().next().cloned()
7082 }
7083
    /// Enqueues a no-op job; the returned receiver resolves once every job
    /// queued before it has completed, letting callers wait for the job
    /// queue to drain up to this point.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
7087
    /// Runs `f` while tracking a pending op (tagged with `git_status`) for
    /// each of `paths`, so per-path in-flight state can be surfaced.
    ///
    /// When `f` resolves, every op created here is marked `Finished`,
    /// `Skipped` (if the job was canceled), or `Error`.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A canceled job is not an error from the caller's perspective.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                // Re-read each path's ops: other jobs may have edited the tree
                // while this one ran; update only this job's op status.
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7127
7128 fn new_pending_ops_for_paths(
7129 &mut self,
7130 paths: Vec<RepoPath>,
7131 git_status: pending_op::GitStatus,
7132 ) -> Vec<(PendingOpId, RepoPath)> {
7133 let mut edits = Vec::with_capacity(paths.len());
7134 let mut ids = Vec::with_capacity(paths.len());
7135 for path in paths {
7136 let mut ops = self
7137 .pending_ops
7138 .get(&PathKey(path.as_ref().clone()), ())
7139 .cloned()
7140 .unwrap_or_else(|| PendingOps::new(&path));
7141 let id = ops.max_id() + 1;
7142 ops.ops.push(PendingOp {
7143 id,
7144 git_status,
7145 job_status: pending_op::JobStatus::Running,
7146 });
7147 edits.push(sum_tree::Edit::Insert(ops));
7148 ids.push((id, path));
7149 }
7150 self.pending_ops.edit(edits, ());
7151 ids
7152 }
7153 pub fn default_remote_url(&self) -> Option<String> {
7154 self.remote_upstream_url
7155 .clone()
7156 .or(self.remote_origin_url.clone())
7157 }
7158}
7159
7160/// If `path` is a git linked worktree checkout, resolves it to the main
7161/// repository's working directory path. Returns `None` if `path` is a normal
7162/// repository, not a git repo, or if resolution fails.
7163///
7164/// Resolution works by:
7165/// 1. Reading the `.git` file to get the `gitdir:` pointer
7166/// 2. Following that to the worktree-specific git directory
7167/// 3. Reading the `commondir` file to find the shared `.git` directory
7168/// 4. Deriving the main repo's working directory from the common dir
7169pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7170 let dot_git = path.join(".git");
7171 let metadata = fs.metadata(&dot_git).await.ok()??;
7172 if metadata.is_dir {
7173 return None; // Normal repo, not a linked worktree
7174 }
7175 // It's a .git file — parse the gitdir: pointer
7176 let content = fs.load(&dot_git).await.ok()?;
7177 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7178 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7179 // Read commondir to find the main .git directory
7180 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7181 let common_dir = fs
7182 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7183 .await
7184 .ok()?;
7185 Some(git::repository::original_repo_path_from_common_dir(
7186 &common_dir,
7187 ))
7188}
7189
/// Validates that the resolved worktree directory is acceptable:
/// - The setting must not be an absolute path.
/// - The resolved path must be either a subdirectory of the working
///   directory or a subdirectory of its parent (i.e., a sibling).
///
/// Returns `Ok(resolved_path)` or an error with a user-facing message.
pub fn worktrees_directory_for_repo(
    original_repo_abs_path: &Path,
    worktree_directory_setting: &str,
) -> Result<PathBuf> {
    // Check the original setting before trimming, since a path like "///"
    // is absolute but becomes "" after stripping trailing separators.
    // Also check for leading `/` or `\` explicitly, because on Windows
    // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
    // would slip through even though it's clearly not a relative path.
    if Path::new(worktree_directory_setting).is_absolute()
        || worktree_directory_setting.starts_with('/')
        || worktree_directory_setting.starts_with('\\')
    {
        anyhow::bail!(
            "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
        );
    }

    if worktree_directory_setting.is_empty() {
        anyhow::bail!("git.worktree_directory must not be empty");
    }

    let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
    if trimmed == ".." {
        anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
    }

    let joined = original_repo_abs_path.join(trimmed);
    let resolved = util::normalize_path(&joined);
    let resolved = if resolved.starts_with(original_repo_abs_path) {
        resolved
    } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
        // Sibling location: nest the worktrees under a directory named after
        // the repository, so repos sharing the same parent don't collide.
        resolved.join(repo_dir_name)
    } else {
        resolved
    };

    let parent = original_repo_abs_path
        .parent()
        .unwrap_or(original_repo_abs_path);

    // Final containment check: the resolved directory must stay within the
    // repository's parent directory (covers both subdirectory and sibling).
    if !resolved.starts_with(parent) {
        anyhow::bail!(
            "git.worktree_directory resolved to {resolved:?}, which is outside \
            the project root and its parent directory. It must resolve to a \
            subdirectory of {original_repo_abs_path:?} or a sibling of it."
        );
    }

    Ok(resolved)
}
7247
7248/// Returns a short name for a linked worktree suitable for UI display
7249///
7250/// Uses the main worktree path to come up with a short name that disambiguates
7251/// the linked worktree from the main worktree.
7252pub fn linked_worktree_short_name(
7253 main_worktree_path: &Path,
7254 linked_worktree_path: &Path,
7255) -> Option<SharedString> {
7256 if main_worktree_path == linked_worktree_path {
7257 return None;
7258 }
7259
7260 let project_name = main_worktree_path.file_name()?.to_str()?;
7261 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7262 let name = if directory_name != project_name {
7263 directory_name.to_string()
7264 } else {
7265 linked_worktree_path
7266 .parent()?
7267 .file_name()?
7268 .to_str()?
7269 .to_string()
7270 };
7271 Some(name.into())
7272}
7273
/// Builds a permalink to the hosted source of a file inside a vendored crate
/// checkout (e.g. the Cargo registry `src` directory).
///
/// Walks up from `path` looking for `.cargo_vcs_info.json` (written by
/// `cargo package`), which records the commit SHA and the crate's location
/// within its source repository, then combines that with the
/// `package.repository` URL from the adjacent `Cargo.toml` to construct a
/// permalink covering `selection` (a line range).
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal views of the JSON/TOML files — only the fields needed here.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Find the crate root: the nearest ancestor containing .cargo_vcs_info.json.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Translate the local path into a repository-relative path by prefixing
    // the crate's recorded location within its VCS tree.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7324
7325fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7326 let Some(blame) = blame else {
7327 return proto::BlameBufferResponse {
7328 blame_response: None,
7329 };
7330 };
7331
7332 let entries = blame
7333 .entries
7334 .into_iter()
7335 .map(|entry| proto::BlameEntry {
7336 sha: entry.sha.as_bytes().into(),
7337 start_line: entry.range.start,
7338 end_line: entry.range.end,
7339 original_line_number: entry.original_line_number,
7340 author: entry.author,
7341 author_mail: entry.author_mail,
7342 author_time: entry.author_time,
7343 author_tz: entry.author_tz,
7344 committer: entry.committer_name,
7345 committer_mail: entry.committer_email,
7346 committer_time: entry.committer_time,
7347 committer_tz: entry.committer_tz,
7348 summary: entry.summary,
7349 previous: entry.previous,
7350 filename: entry.filename,
7351 })
7352 .collect::<Vec<_>>();
7353
7354 let messages = blame
7355 .messages
7356 .into_iter()
7357 .map(|(oid, message)| proto::CommitMessage {
7358 oid: oid.as_bytes().into(),
7359 message,
7360 })
7361 .collect::<Vec<_>>();
7362
7363 proto::BlameBufferResponse {
7364 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7365 }
7366}
7367
7368fn deserialize_blame_buffer_response(
7369 response: proto::BlameBufferResponse,
7370) -> Option<git::blame::Blame> {
7371 let response = response.blame_response?;
7372 let entries = response
7373 .entries
7374 .into_iter()
7375 .filter_map(|entry| {
7376 Some(git::blame::BlameEntry {
7377 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7378 range: entry.start_line..entry.end_line,
7379 original_line_number: entry.original_line_number,
7380 committer_name: entry.committer,
7381 committer_time: entry.committer_time,
7382 committer_tz: entry.committer_tz,
7383 committer_email: entry.committer_mail,
7384 author: entry.author,
7385 author_mail: entry.author_mail,
7386 author_time: entry.author_time,
7387 author_tz: entry.author_tz,
7388 summary: entry.summary,
7389 previous: entry.previous,
7390 filename: entry.filename,
7391 })
7392 })
7393 .collect::<Vec<_>>();
7394
7395 let messages = response
7396 .messages
7397 .into_iter()
7398 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7399 .collect::<HashMap<_, _>>();
7400
7401 Some(Blame { entries, messages })
7402}
7403
7404fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7405 proto::Branch {
7406 is_head: branch.is_head,
7407 ref_name: branch.ref_name.to_string(),
7408 unix_timestamp: branch
7409 .most_recent_commit
7410 .as_ref()
7411 .map(|commit| commit.commit_timestamp as u64),
7412 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7413 ref_name: upstream.ref_name.to_string(),
7414 tracking: upstream
7415 .tracking
7416 .status()
7417 .map(|upstream| proto::UpstreamTracking {
7418 ahead: upstream.ahead as u64,
7419 behind: upstream.behind as u64,
7420 }),
7421 }),
7422 most_recent_commit: branch
7423 .most_recent_commit
7424 .as_ref()
7425 .map(|commit| proto::CommitSummary {
7426 sha: commit.sha.to_string(),
7427 subject: commit.subject.to_string(),
7428 commit_timestamp: commit.commit_timestamp,
7429 author_name: commit.author_name.to_string(),
7430 }),
7431 }
7432}
7433
7434fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7435 proto::Worktree {
7436 path: worktree.path.to_string_lossy().to_string(),
7437 ref_name: worktree
7438 .ref_name
7439 .as_ref()
7440 .map(|s| s.to_string())
7441 .unwrap_or_default(),
7442 sha: worktree.sha.to_string(),
7443 is_main: worktree.is_main,
7444 }
7445}
7446
7447fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7448 git::repository::Worktree {
7449 path: PathBuf::from(proto.path.clone()),
7450 ref_name: Some(SharedString::from(&proto.ref_name)),
7451 sha: proto.sha.clone().into(),
7452 is_main: proto.is_main,
7453 }
7454}
7455
7456fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7457 git::repository::Branch {
7458 is_head: proto.is_head,
7459 ref_name: proto.ref_name.clone().into(),
7460 upstream: proto
7461 .upstream
7462 .as_ref()
7463 .map(|upstream| git::repository::Upstream {
7464 ref_name: upstream.ref_name.to_string().into(),
7465 tracking: upstream
7466 .tracking
7467 .as_ref()
7468 .map(|tracking| {
7469 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7470 ahead: tracking.ahead as u32,
7471 behind: tracking.behind as u32,
7472 })
7473 })
7474 .unwrap_or(git::repository::UpstreamTracking::Gone),
7475 }),
7476 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7477 git::repository::CommitSummary {
7478 sha: commit.sha.to_string().into(),
7479 subject: commit.subject.to_string().into(),
7480 commit_timestamp: commit.commit_timestamp,
7481 author_name: commit.author_name.to_string().into(),
7482 has_parent: true,
7483 }
7484 }),
7485 }
7486}
7487
7488fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7489 proto::GitCommitDetails {
7490 sha: commit.sha.to_string(),
7491 message: commit.message.to_string(),
7492 commit_timestamp: commit.commit_timestamp,
7493 author_email: commit.author_email.to_string(),
7494 author_name: commit.author_name.to_string(),
7495 }
7496}
7497
7498fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7499 CommitDetails {
7500 sha: proto.sha.clone().into(),
7501 message: proto.message.clone().into(),
7502 commit_timestamp: proto.commit_timestamp,
7503 author_email: proto.author_email.clone().into(),
7504 author_name: proto.author_name.clone().into(),
7505 }
7506}
7507
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Capture the entity state needed for the recompute. Any paths queued for
    // a targeted status refresh are superseded by this full scan, so clear
    // them.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve the HEAD commit, if any. A failed `show` is logged and treated
    // the same as an unborn HEAD (None).
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Run the three independent git queries concurrently on the background
    // executor; any single failure aborts the snapshot.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    // The currently checked-out branch is the one flagged as HEAD.
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // Keep only *linked* worktrees: the entry for this repository's own work
    // directory is excluded.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    // Fetch both remote URLs concurrently; each is independently optional, so
    // `join` (not `try_join`) is used and the pair is returned as-is.
    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First foreground update: publish branch/head/remote/worktree state and
    // emit the corresponding change events so the UI can react before the
    // (slower) file-status computation below finishes.
    let snapshot = this.update(cx, |this, cx| {
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Second background phase: full status scan, diff stats (only meaningful
    // once a HEAD commit exists — otherwise an empty result is synthesized),
    // and the stash list, all concurrently.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    // "." scopes the status query to the whole repository.
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Index diff stats by path so each status entry can be annotated with its
    // stat in a single pass.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    // Build the status SumTree, collecting conflicted paths along the way for
    // the merge-details update below.
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Refresh merge state (e.g. conflict tracking) on the background thread,
    // reporting whether the conflict set changed.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Final foreground update: publish file-level state, emit status/stash
    // events, and bump scan_id again to mark the second mutation.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7677
7678fn status_from_proto(
7679 simple_status: i32,
7680 status: Option<proto::GitFileStatus>,
7681) -> anyhow::Result<FileStatus> {
7682 use proto::git_file_status::Variant;
7683
7684 let Some(variant) = status.and_then(|status| status.variant) else {
7685 let code = proto::GitStatus::from_i32(simple_status)
7686 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7687 let result = match code {
7688 proto::GitStatus::Added => TrackedStatus {
7689 worktree_status: StatusCode::Added,
7690 index_status: StatusCode::Unmodified,
7691 }
7692 .into(),
7693 proto::GitStatus::Modified => TrackedStatus {
7694 worktree_status: StatusCode::Modified,
7695 index_status: StatusCode::Unmodified,
7696 }
7697 .into(),
7698 proto::GitStatus::Conflict => UnmergedStatus {
7699 first_head: UnmergedStatusCode::Updated,
7700 second_head: UnmergedStatusCode::Updated,
7701 }
7702 .into(),
7703 proto::GitStatus::Deleted => TrackedStatus {
7704 worktree_status: StatusCode::Deleted,
7705 index_status: StatusCode::Unmodified,
7706 }
7707 .into(),
7708 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7709 };
7710 return Ok(result);
7711 };
7712
7713 let result = match variant {
7714 Variant::Untracked(_) => FileStatus::Untracked,
7715 Variant::Ignored(_) => FileStatus::Ignored,
7716 Variant::Unmerged(unmerged) => {
7717 let [first_head, second_head] =
7718 [unmerged.first_head, unmerged.second_head].map(|head| {
7719 let code = proto::GitStatus::from_i32(head)
7720 .with_context(|| format!("Invalid git status code: {head}"))?;
7721 let result = match code {
7722 proto::GitStatus::Added => UnmergedStatusCode::Added,
7723 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7724 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7725 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7726 };
7727 Ok(result)
7728 });
7729 let [first_head, second_head] = [first_head?, second_head?];
7730 UnmergedStatus {
7731 first_head,
7732 second_head,
7733 }
7734 .into()
7735 }
7736 Variant::Tracked(tracked) => {
7737 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7738 .map(|status| {
7739 let code = proto::GitStatus::from_i32(status)
7740 .with_context(|| format!("Invalid git status code: {status}"))?;
7741 let result = match code {
7742 proto::GitStatus::Modified => StatusCode::Modified,
7743 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7744 proto::GitStatus::Added => StatusCode::Added,
7745 proto::GitStatus::Deleted => StatusCode::Deleted,
7746 proto::GitStatus::Renamed => StatusCode::Renamed,
7747 proto::GitStatus::Copied => StatusCode::Copied,
7748 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7749 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7750 };
7751 Ok(result)
7752 });
7753 let [index_status, worktree_status] = [index_status?, worktree_status?];
7754 TrackedStatus {
7755 index_status,
7756 worktree_status,
7757 }
7758 .into()
7759 }
7760 };
7761 Ok(result)
7762}
7763
7764fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7765 use proto::git_file_status::{Tracked, Unmerged, Variant};
7766
7767 let variant = match status {
7768 FileStatus::Untracked => Variant::Untracked(Default::default()),
7769 FileStatus::Ignored => Variant::Ignored(Default::default()),
7770 FileStatus::Unmerged(UnmergedStatus {
7771 first_head,
7772 second_head,
7773 }) => Variant::Unmerged(Unmerged {
7774 first_head: unmerged_status_to_proto(first_head),
7775 second_head: unmerged_status_to_proto(second_head),
7776 }),
7777 FileStatus::Tracked(TrackedStatus {
7778 index_status,
7779 worktree_status,
7780 }) => Variant::Tracked(Tracked {
7781 index_status: tracked_status_to_proto(index_status),
7782 worktree_status: tracked_status_to_proto(worktree_status),
7783 }),
7784 };
7785 proto::GitFileStatus {
7786 variant: Some(variant),
7787 }
7788}
7789
7790fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7791 match code {
7792 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7793 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7794 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7795 }
7796}
7797
7798fn tracked_status_to_proto(code: StatusCode) -> i32 {
7799 match code {
7800 StatusCode::Added => proto::GitStatus::Added as _,
7801 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7802 StatusCode::Modified => proto::GitStatus::Modified as _,
7803 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7804 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7805 StatusCode::Copied => proto::GitStatus::Copied as _,
7806 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7807 }
7808}