1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, CreateWorktreeTarget,
36 DiffType, FetchOptions, GitRepository, GitRepositoryCheckpoint, GraphCommitData,
37 InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote, RemoteCommandOutput,
38 RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for a project's git state: every discovered repository,
/// per-buffer diff state, and the machinery for sharing all of it with
/// downstream collaborators.
pub struct GitStore {
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    // All repositories known to this store, keyed by id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // NOTE(review): presumably the worktrees associated with each repository — confirm.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    // The repository currently treated as "active"; changes emit
    // `GitStoreEvent::ActiveRepositoryChanged`.
    active_repo_id: Option<RepositoryId>,
    // In-flight diff loads, `Shared` so concurrent requests for the same
    // (buffer, kind) pair await a single task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (open diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diffs that have been shared with remote peers, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// The diffs for one buffer that have been shared with a particular peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}

/// Per-buffer git state: weak handles to any open diffs and conflict set,
/// plus the cached base texts and bookkeeping used to recompute them.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against arbitrary commits, keyed by OID (`None` = diff with no
    // base commit; see `open_diff_since`).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders that are resolved when a conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in flight.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    // Cached base texts for OID diffs, keyed by commit OID.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}

/// Describes which diff base texts (index and/or HEAD) changed, and their
/// new contents (`None` meaning "no base text").
#[derive(Clone, Debug)]
enum DiffBasesChange {
    // Only the index (staged) text changed.
    SetIndex(Option<String>),
    SetHead(Option<String>),
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    // NOTE(review): presumably a single text applied to both bases — confirm.
    SetBoth(Option<String>),
}

/// Identifies which kind of diff is being loaded for a buffer; part of the
/// key for `GitStore::loading_diffs`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    // Buffer contents vs. the index (loaded via `load_staged_text`).
    Unstaged,
    // Buffer contents vs. HEAD (loaded via `load_committed_text`).
    Uncommitted,
    // Buffer contents vs. an arbitrary commit (`None` = no base content).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store runs against repositories on the local filesystem or
/// mirrors a remote collaborator's project.
enum GitStoreState {
    Local {
        // Source of fresh repository ids (allocation starts at 1).
        next_repository_id: Arc<AtomicU64>,
        // Present while the project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        // Present while this mirrored project is itself re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A change queued for propagation to downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Connection state for downstream collaborators of a locally-hosted
/// project, plus the background task that serializes updates to them.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    // Feed of snapshots/removals consumed by `_task` (see `GitStore::shared`).
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}

/// A point-in-time capture of repository state, keyed by working-directory
/// path, used by the checkpoint restore/compare operations.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}

/// The git status of a single path within a repository, with optional
/// added/deleted line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
// Status entries are stored in a `SumTree` ordered by repository path, with
// aggregated `GitSummary` values for efficient range queries.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    // Entries are keyed by their repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Unique identifier for a repository within a `GitStore`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: merge heads recorded per conflicted path,
/// and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Commit data for the git graph, which may still be loading.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}

/// An immutable snapshot of one repository's observable state.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    // Git status per path, aggregated in a sum tree.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    // NOTE(review): presumably incremented per status scan to order
    // snapshot updates — confirm against the scanning code.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    // Git worktrees linked to this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}

/// Identifier for a queued git job.
type JobId = u64;

/// Metadata about a running git job (start time and display message).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Handle to the background task that serves commit-data requests for the
/// git graph.
struct GraphCommitDataHandler {
    _task: Task<()>,
    // Channel used to request data for a specific commit.
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph commit-data handler task.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Initial commit data for the git graph, together with the task fetching
/// it and an index from commit OID to position in `commit_data`.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of graph data handed back to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
331
/// Live handle to a single git repository: the latest snapshot plus the
/// job queue, caches, and backend state that drive it.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Queue of git jobs for this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    // Jobs currently running, for progress display.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Counter for allocating job ids.
    job_id: JobId,
    // Askpass delegates for in-flight credential prompts, keyed by request id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily-resolved backend (local git backend or remote client).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}

/// A `Repository` dereferences to its current snapshot, so read-only
/// snapshot accessors can be called directly on the repository.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
359
/// Backend state for a repository on the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    // Shell environment captured from the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}

impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`.
    ///
    /// Loads the working directory's shell environment (falling back to an
    /// empty environment and logging on failure), locates a system `git`
    /// binary using that environment's `PATH`, opens the repository through
    /// `fs`, and applies the caller-provided trust state to the backend.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found on the project environment's PATH;
                    // fall back to whatever `which` finds on the default PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
409
/// Backend state for a repository hosted by a remote collaborator.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// Where git operations for a repository are actually executed.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events for git graph loading.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    // The number of known commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}

/// Events emitted by a `Repository` when parts of its snapshot change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Marker event type emitted by `Repository` for job-list changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the `GitStore` itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of git work: a closure run with the repository's backend state.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    key: Option<GitJobKey>,
}

// NOTE(review): keys appear to identify classes of jobs that can be
// coalesced/deduplicated in the queue — confirm against the job runner.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
471
472impl GitStore {
    /// Creates a `GitStore` backed by repositories on the local filesystem.
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                // Repository ids are allocated starting from 1.
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }

    /// Creates a `GitStore` that mirrors repositories hosted by a remote
    /// collaborator, reached through `upstream_client`.
    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: u64,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                downstream: None,
            },
            cx,
        )
    }
511
    /// Shared constructor for [`Self::local`] and [`Self::remote`]:
    /// subscribes to worktree, buffer, and (when present) trusted-worktree
    /// events, and starts with no repositories.
    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        let mut _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        // The trusted-worktrees global may be absent; subscribe to trust
        // changes only when it exists.
        if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
            _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
        }

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            worktree_ids: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }
540
    /// Registers all git-related RPC message handlers on the given client,
    /// so remote peers can drive this store over the wire.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
    }
593
594 pub fn is_local(&self) -> bool {
595 matches!(self.state, GitStoreState::Local { .. })
596 }
597
598 fn set_active_repo_id(&mut self, repo_id: RepositoryId, cx: &mut Context<Self>) {
599 if self.active_repo_id != Some(repo_id) {
600 self.active_repo_id = Some(repo_id);
601 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
602 }
603 }
604
605 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
606 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
607 self.set_active_repo_id(repo.read(cx).id, cx);
608 }
609 }
610
611 pub fn set_active_repo_for_worktree(
612 &mut self,
613 worktree_id: WorktreeId,
614 cx: &mut Context<Self>,
615 ) {
616 let Some(worktree) = self
617 .worktree_store
618 .read(cx)
619 .worktree_for_id(worktree_id, cx)
620 else {
621 return;
622 };
623 let worktree_abs_path = worktree.read(cx).abs_path();
624 let Some(repo_id) = self
625 .repositories
626 .values()
627 .filter(|repo| {
628 let repo_path = &repo.read(cx).work_directory_abs_path;
629 *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref())
630 })
631 .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
632 .map(|repo| repo.read(cx).id)
633 else {
634 return;
635 };
636
637 self.set_active_repo_id(repo_id, cx);
638 }
639
    /// Starts sharing this store's repositories with downstream
    /// collaborators identified by `project_id`, over `client`.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Mirrored project: immediately forward a full initial
                // snapshot of each repository, split into multiple messages
                // via `split_repository_update`.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // Locally-hosted project: queue snapshots through an
                // unbounded channel and let a background task diff each
                // snapshot against the last one sent, forwarding deltas.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the channel with the current state of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send only the
                                            // changes since the last snapshot.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send its full
                                            // initial state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The channel closed or a send failed: tear down the
                        // downstream connection state.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
720
721 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
722 match &mut self.state {
723 GitStoreState::Local {
724 downstream: downstream_client,
725 ..
726 } => {
727 downstream_client.take();
728 }
729 GitStoreState::Remote {
730 downstream: downstream_client,
731 ..
732 } => {
733 downstream_client.take();
734 }
735 }
736 self.shared_diffs.clear();
737 }
738
739 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
740 self.shared_diffs.remove(peer_id);
741 }
742
743 pub fn active_repository(&self) -> Option<Entity<Repository>> {
744 self.active_repo_id
745 .as_ref()
746 .map(|id| self.repositories[id].clone())
747 }
748
749 pub fn open_unstaged_diff(
750 &mut self,
751 buffer: Entity<Buffer>,
752 cx: &mut Context<Self>,
753 ) -> Task<Result<Entity<BufferDiff>>> {
754 let buffer_id = buffer.read(cx).remote_id();
755 if let Some(diff_state) = self.diffs.get(&buffer_id)
756 && let Some(unstaged_diff) = diff_state
757 .read(cx)
758 .unstaged_diff
759 .as_ref()
760 .and_then(|weak| weak.upgrade())
761 {
762 if let Some(task) =
763 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
764 {
765 return cx.background_executor().spawn(async move {
766 task.await;
767 Ok(unstaged_diff)
768 });
769 }
770 return Task::ready(Ok(unstaged_diff));
771 }
772
773 let Some((repo, repo_path)) =
774 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
775 else {
776 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
777 };
778
779 let task = self
780 .loading_diffs
781 .entry((buffer_id, DiffKind::Unstaged))
782 .or_insert_with(|| {
783 let staged_text = repo.update(cx, |repo, cx| {
784 repo.load_staged_text(buffer_id, repo_path, cx)
785 });
786 cx.spawn(async move |this, cx| {
787 Self::open_diff_internal(
788 this,
789 DiffKind::Unstaged,
790 staged_text.await.map(DiffBasesChange::SetIndex),
791 buffer,
792 cx,
793 )
794 .await
795 .map_err(Arc::new)
796 })
797 .shared()
798 })
799 .clone();
800
801 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
802 }
803
    /// Opens (or returns the already-open) diff of the buffer's contents
    /// against the blob at `oid` (`None` means a diff with no base content).
    ///
    /// The resulting diff gets the buffer's unstaged diff attached as its
    /// secondary diff, and in-flight loads are shared between callers.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff is already open; wait for any in-flight
        // recalculation before handing it out.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load for this (buffer, oid) pair may already be in progress.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository; `None` means
                    // the diff has no base content at all.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Attach the buffer's unstaged diff as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        // Record the new diff (and cache its base text) on
                        // the buffer's git state.
                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
904
905 #[ztracing::instrument(skip_all)]
906 pub fn open_uncommitted_diff(
907 &mut self,
908 buffer: Entity<Buffer>,
909 cx: &mut Context<Self>,
910 ) -> Task<Result<Entity<BufferDiff>>> {
911 let buffer_id = buffer.read(cx).remote_id();
912
913 if let Some(diff_state) = self.diffs.get(&buffer_id)
914 && let Some(uncommitted_diff) = diff_state
915 .read(cx)
916 .uncommitted_diff
917 .as_ref()
918 .and_then(|weak| weak.upgrade())
919 {
920 if let Some(task) =
921 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
922 {
923 return cx.background_executor().spawn(async move {
924 task.await;
925 Ok(uncommitted_diff)
926 });
927 }
928 return Task::ready(Ok(uncommitted_diff));
929 }
930
931 let Some((repo, repo_path)) =
932 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
933 else {
934 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
935 };
936
937 let task = self
938 .loading_diffs
939 .entry((buffer_id, DiffKind::Uncommitted))
940 .or_insert_with(|| {
941 let changes = repo.update(cx, |repo, cx| {
942 repo.load_committed_text(buffer_id, repo_path, cx)
943 });
944
945 // todo(lw): hot foreground spawn
946 cx.spawn(async move |this, cx| {
947 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
948 .await
949 .map_err(Arc::new)
950 })
951 .shared()
952 })
953 .clone();
954
955 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
956 }
957
    /// Shared tail of `open_unstaged_diff`/`open_uncommitted_diff`: installs
    /// the freshly-loaded base texts into the buffer's git state, creates
    /// the `BufferDiff`, and waits for the first recalculation to finish.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Loading the base text failed: clear the loading entry so a
                // later call can retry, then propagate the error.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => {
                        diff_state.unstaged_diff.get_or_insert(diff.downgrade());
                    }
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none is open yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Apply the new base texts, then wait for the resulting
                // recalculation (if one was started) before resolving.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1034
1035 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1036 let diff_state = self.diffs.get(&buffer_id)?;
1037 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1038 }
1039
1040 pub fn get_uncommitted_diff(
1041 &self,
1042 buffer_id: BufferId,
1043 cx: &App,
1044 ) -> Option<Entity<BufferDiff>> {
1045 let diff_state = self.diffs.get(&buffer_id)?;
1046 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1047 }
1048
1049 pub fn get_diff_since_oid(
1050 &self,
1051 buffer_id: BufferId,
1052 oid: Option<git::Oid>,
1053 cx: &App,
1054 ) -> Option<Entity<BufferDiff>> {
1055 let diff_state = self.diffs.get(&buffer_id)?;
1056 diff_state.read(cx).oid_diff(oid)
1057 }
1058
    /// Returns the [`ConflictSet`] for `buffer`, creating and wiring one up if
    /// no live conflict set exists yet.
    ///
    /// In both paths the buffer's current text is re-scanned for conflict
    /// markers so the set starts out (or stays) in sync with the buffer.
    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: reuse an existing conflict set if its weak handle still
        // upgrades.
        if let Some(git_state) = self.diffs.get(&buffer_id)
            && let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            let conflict_set = conflict_set;
            let buffer_snapshot = buffer.read(cx).text_snapshot();

            // Refresh the markers; the returned task is deliberately dropped
            // (fire-and-forget reparse).
            git_state.update(cx, |state, cx| {
                let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
            });

            return conflict_set;
        }

        // Seed the "unmerged" flag from the repository's status for this path,
        // if the buffer belongs to a repository at all.
        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        // Bubble conflict-set changes up as a store-level event.
        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        // Register the new set (as a weak handle) and do the initial parse.
        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }
1107
1108 pub fn project_path_git_status(
1109 &self,
1110 project_path: &ProjectPath,
1111 cx: &App,
1112 ) -> Option<FileStatus> {
1113 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1114 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1115 }
1116
    /// Takes a checkpoint of every repository in the store.
    ///
    /// Checkpoints are gathered per work directory and awaited together on
    /// the background executor; if any single repository fails, the whole
    /// task fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` resolves to a nested result; the `?` inside
                // `map` flattens it into a single fallible future.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // The two Vecs were pushed in lockstep above, so zipping pairs
                // each checkpoint with the work directory it was taken from.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1137
    /// Restores a previously captured [`GitStoreCheckpoint`].
    ///
    /// Checkpoint entries whose work directory no longer maps to a known
    /// repository are silently skipped; the remaining restores run
    /// concurrently and any failure fails the returned task.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index current repositories by work directory so checkpoint entries
        // can be matched back to their repository.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // `?` flattens the nested result produced by the repository job.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1163
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// If `right` lacks an entry for any work directory present in `left`,
    /// the checkpoints cannot be equal and the task short-circuits with
    /// `Ok(false)` without running any comparison jobs.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            // `remove` (not `get`) so each right-hand entry is consumed once.
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                // NOTE: entries whose repository has since disappeared are
                // skipped rather than treated as unequal.
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    tasks.push(async move { compare.await? });
                }
            } else {
                return Task::ready(Ok(false));
            }
        }
        // Equal only if every per-repository comparison reports equality.
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1202
    /// Blames a buffer.
    ///
    /// When `version` is provided, the text as of that buffer version is
    /// blamed; otherwise the current contents. Local repositories run blame
    /// through the git backend; remote ones forward the request over RPC.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot everything needed from the buffer up front so the async
        // block doesn't have to re-borrow it.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold the repository weakly so the pending blame doesn't keep it alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                // Local: blame the captured content directly via the backend.
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                // Remote: ask the host; the response may deserialize to `None`.
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1249
1250 pub fn file_history(
1251 &self,
1252 repo: &Entity<Repository>,
1253 path: RepoPath,
1254 cx: &mut App,
1255 ) -> Task<Result<git::repository::FileHistory>> {
1256 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1257
1258 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1259 }
1260
1261 pub fn file_history_paginated(
1262 &self,
1263 repo: &Entity<Repository>,
1264 path: RepoPath,
1265 skip: usize,
1266 limit: Option<usize>,
1267 cx: &mut App,
1268 ) -> Task<Result<git::repository::FileHistory>> {
1269 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1270
1271 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1272 }
1273
    /// Builds a web permalink for the given line range of `buffer`.
    ///
    /// If the buffer is inside a git repository, the permalink is built from
    /// the repository's remote URL and HEAD SHA. Otherwise, for Rust files in
    /// the Cargo registry source cache, a permalink is derived from crate
    /// metadata; all other buffers fail.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        let branch = repo.read(cx).branch.clone();
        // Prefer the current branch's upstream remote; fall back to "origin".
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Local: resolve remote URL + HEAD SHA, then let the
                    // hosting provider construct the permalink.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Remote: delegate permalink construction to the host.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1358
1359 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1360 match &self.state {
1361 GitStoreState::Local {
1362 downstream: downstream_client,
1363 ..
1364 } => downstream_client
1365 .as_ref()
1366 .map(|state| (state.client.clone(), state.project_id)),
1367 GitStoreState::Remote {
1368 downstream: downstream_client,
1369 ..
1370 } => downstream_client.clone(),
1371 }
1372 }
1373
1374 fn upstream_client(&self) -> Option<AnyProtoClient> {
1375 match &self.state {
1376 GitStoreState::Local { .. } => None,
1377 GitStoreState::Remote {
1378 upstream_client, ..
1379 } => Some(upstream_client.clone()),
1380 }
1381 }
1382
    /// Reacts to worktree changes on local projects: rescans changed paths,
    /// adds/updates repositories, and drops repositories whose last worktree
    /// went away. Remote projects get their repository state over RPC
    /// instead, so non-local stores return immediately.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by the repository that owns
                    // them, then notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Hidden worktrees never contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // A repository may span multiple worktrees; only drop it once
                // the removed worktree was the last one referencing it.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream collaborators the repository is gone.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Handles a repository-level change: refreshes the conflict state of any
    /// buffer belonging to that repository, then re-emits the event at the
    /// store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only buffers that resolve to this same repository are affected.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Sync the unmerged flag with the repository status;
                        // only reparse markers when the flag actually flipped.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                // A dropped conflict set is not an error worth surfacing here.
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1528
    /// Re-emits repository job-queue changes as a store-level event so
    /// listeners only need to subscribe to the store.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1532
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
    ///
    /// Each update either (1) moves or removes an existing repository whose
    /// work directory matches the update, or (2) registers a brand-new local
    /// repository. Repository removals are applied at the end so the
    /// active-repository bookkeeping stays consistent.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match an existing repository by either its old or new work
            // directory path (a rename matches on the old path).
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repository still exists (possibly moved): track the
                    // worktree, update its path, and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // The repository vanished from this worktree; drop it
                    // entirely once no worktree references it anymore.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // Brand-new repository: resolve the main checkout path (for
                // linked git worktrees this differs from the work directory).
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Repositories in untrusted worktrees get a restricted backend.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // First repository found becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        // Apply deferred removals, clearing the active repository if needed
        // and notifying downstream collaborators.
        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1645
1646 fn on_trusted_worktrees_event(
1647 &mut self,
1648 _: Entity<TrustedWorktreesStore>,
1649 event: &TrustedWorktreesEvent,
1650 cx: &mut Context<Self>,
1651 ) {
1652 if !matches!(self.state, GitStoreState::Local { .. }) {
1653 return;
1654 }
1655
1656 let (is_trusted, event_paths) = match event {
1657 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1658 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1659 };
1660
1661 for (repo_id, worktree_ids) in &self.worktree_ids {
1662 if worktree_ids
1663 .iter()
1664 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1665 {
1666 if let Some(repo) = self.repositories.get(repo_id) {
1667 let repository_state = repo.read(cx).repository_state.clone();
1668 cx.background_spawn(async move {
1669 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1670 state.backend.set_trusted(is_trusted);
1671 }
1672 })
1673 .detach();
1674 }
1675 }
1676 }
1677 }
1678
    /// Keeps per-buffer git state in sync with buffer lifecycle events:
    /// language changes, shared-buffer teardown, buffer drops, and file
    /// renames (which may move a buffer into or out of a repository).
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch each buffer for language changes so diffs can be
                // re-highlighted with the new language.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // A collaborator stopped viewing this buffer; forget the diff
                // we were sharing with them.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop all diff state, including any copies shared with peers.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Reload the committed text for the new path and swap the
                    // diff bases; failures are logged, not surfaced.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1751
    /// Recomputes the diffs and conflict markers for the given buffers.
    ///
    /// Returns a future that resolves once every triggered recalculation and
    /// reparse has completed. Buffers with no tracked git state are skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` is `None` when nothing was
                    // scheduled; only wait when there is something to wait on.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                // Also refresh conflict markers from the same snapshot.
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1777
    /// Reacts to hunks being staged or unstaged in a buffer diff by writing
    /// the new index text to the repository; on failure the pending hunks are
    /// rolled back and an `IndexWriteError` is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump and capture the operation counter so stale index writes
                // can be recognized downstream.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            // `None` index text means the file is removed from
                            // the index entirely.
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Only a completed-but-failed job (`Ok(Err(_))`) needs
                        // cleanup; a dropped channel means the job was canceled.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1817
1818 fn local_worktree_git_repos_changed(
1819 &mut self,
1820 worktree: Entity<Worktree>,
1821 changed_repos: &UpdatedGitRepositoriesSet,
1822 cx: &mut Context<Self>,
1823 ) {
1824 log::debug!("local worktree repos changed");
1825 debug_assert!(worktree.read(cx).is_local());
1826
1827 for repository in self.repositories.values() {
1828 repository.update(cx, |repository, cx| {
1829 let repo_abs_path = &repository.work_directory_abs_path;
1830 if changed_repos.iter().any(|update| {
1831 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1832 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1833 }) {
1834 repository.reload_buffer_diff_bases(cx);
1835 }
1836 });
1837 }
1838 }
1839
    /// All repositories currently tracked by this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1843
    /// Returns the original (main) repository working directory for the given worktree.
    /// For normal checkouts this equals the worktree's own path; for linked
    /// worktrees it points back to the original repo.
    pub fn original_repo_path_for_worktree(
        &self,
        worktree_id: WorktreeId,
        cx: &App,
    ) -> Option<Arc<Path>> {
        // Check the active repository first, then any other repository that
        // spans this worktree.
        self.active_repo_id
            .iter()
            .chain(self.worktree_ids.keys())
            .find(|repo_id| {
                self.worktree_ids
                    .get(repo_id)
                    .is_some_and(|ids| ids.contains(&worktree_id))
            })
            .and_then(|repo_id| self.repositories.get(repo_id))
            .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
    }
1863
1864 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1865 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1866 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1867 Some(status.status)
1868 }
1869
1870 pub fn repository_and_path_for_buffer_id(
1871 &self,
1872 buffer_id: BufferId,
1873 cx: &App,
1874 ) -> Option<(Entity<Repository>, RepoPath)> {
1875 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1876 let project_path = buffer.read(cx).project_path(cx)?;
1877 self.repository_and_path_for_project_path(&project_path, cx)
1878 }
1879
    /// Resolves a project path to the repository containing it, plus the
    /// path relative to that repository's work directory.
    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            // With nested repositories, several may contain the path. Every
            // containing work directory is a path prefix of the innermost
            // one, so the greatest work directory (by `Path` ordering) is the
            // most specific repository.
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }
1894
1895 pub fn git_init(
1896 &self,
1897 path: Arc<Path>,
1898 fallback_branch_name: String,
1899 cx: &App,
1900 ) -> Task<Result<()>> {
1901 match &self.state {
1902 GitStoreState::Local { fs, .. } => {
1903 let fs = fs.clone();
1904 cx.background_executor()
1905 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1906 }
1907 GitStoreState::Remote {
1908 upstream_client,
1909 upstream_project_id: project_id,
1910 ..
1911 } => {
1912 let client = upstream_client.clone();
1913 let project_id = *project_id;
1914 cx.background_executor().spawn(async move {
1915 client
1916 .request(proto::GitInit {
1917 project_id: project_id,
1918 abs_path: path.to_string_lossy().into_owned(),
1919 fallback_branch_name,
1920 })
1921 .await?;
1922 Ok(())
1923 })
1924 }
1925 }
1926 }
1927
1928 pub fn git_clone(
1929 &self,
1930 repo: String,
1931 path: impl Into<Arc<std::path::Path>>,
1932 cx: &App,
1933 ) -> Task<Result<()>> {
1934 let path = path.into();
1935 match &self.state {
1936 GitStoreState::Local { fs, .. } => {
1937 let fs = fs.clone();
1938 cx.background_executor()
1939 .spawn(async move { fs.git_clone(&repo, &path).await })
1940 }
1941 GitStoreState::Remote {
1942 upstream_client,
1943 upstream_project_id,
1944 ..
1945 } => {
1946 if upstream_client.is_via_collab() {
1947 return Task::ready(Err(anyhow!(
1948 "Git Clone isn't supported for project guests"
1949 )));
1950 }
1951 let request = upstream_client.request(proto::GitClone {
1952 project_id: *upstream_project_id,
1953 abs_path: path.to_string_lossy().into_owned(),
1954 remote_repo: repo,
1955 });
1956
1957 cx.background_spawn(async move {
1958 let result = request.await?;
1959
1960 match result.success {
1961 true => Ok(()),
1962 false => Err(anyhow!("Git Clone failed")),
1963 }
1964 })
1965 }
1966 }
1967 }
1968
    /// RPC handler: applies an `UpdateRepository` message from the upstream
    /// host, creating the remote repository entry on first sight, and
    /// re-forwards the update to any downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Lazily create the repository entry; the subscription created in
            // the closure is stashed and registered after the borrow ends.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository we hear about becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Relay the update downstream with our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
2024
2025 async fn handle_remove_repository(
2026 this: Entity<Self>,
2027 envelope: TypedEnvelope<proto::RemoveRepository>,
2028 mut cx: AsyncApp,
2029 ) -> Result<()> {
2030 this.update(&mut cx, |this, cx| {
2031 let mut update = envelope.payload;
2032 let id = RepositoryId::from_proto(update.id);
2033 this.repositories.remove(&id);
2034 if let Some((client, project_id)) = this.downstream_client() {
2035 update.project_id = project_id.to_proto();
2036 client.send(update).log_err();
2037 }
2038 if this.active_repo_id == Some(id) {
2039 this.active_repo_id = None;
2040 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
2041 }
2042 cx.emit(GitStoreEvent::RepositoryRemoved(id));
2043 });
2044 Ok(())
2045 }
2046
2047 async fn handle_git_init(
2048 this: Entity<Self>,
2049 envelope: TypedEnvelope<proto::GitInit>,
2050 cx: AsyncApp,
2051 ) -> Result<proto::Ack> {
2052 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2053 let name = envelope.payload.fallback_branch_name;
2054 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2055 .await?;
2056
2057 Ok(proto::Ack {})
2058 }
2059
2060 async fn handle_git_clone(
2061 this: Entity<Self>,
2062 envelope: TypedEnvelope<proto::GitClone>,
2063 cx: AsyncApp,
2064 ) -> Result<proto::GitCloneResponse> {
2065 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2066 let repo_name = envelope.payload.remote_repo;
2067 let result = cx
2068 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2069 .await;
2070
2071 Ok(proto::GitCloneResponse {
2072 success: result.is_ok(),
2073 })
2074 }
2075
    /// RPC handler: runs a `git fetch` for a collaborator, proxying
    /// credential prompts back to them, and relays the remote's
    /// stdout/stderr in the response.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Askpass prompts are forwarded to the requesting peer rather than
        // answered locally.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `??` unwraps both layers of the nested result produced by the
        // fetch job.
        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2105
2106 async fn handle_push(
2107 this: Entity<Self>,
2108 envelope: TypedEnvelope<proto::Push>,
2109 mut cx: AsyncApp,
2110 ) -> Result<proto::RemoteMessageResponse> {
2111 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2112 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2113
2114 let askpass_id = envelope.payload.askpass_id;
2115 let askpass = make_remote_delegate(
2116 this,
2117 envelope.payload.project_id,
2118 repository_id,
2119 askpass_id,
2120 &mut cx,
2121 );
2122
2123 let options = envelope
2124 .payload
2125 .options
2126 .as_ref()
2127 .map(|_| match envelope.payload.options() {
2128 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
2129 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
2130 });
2131
2132 let branch_name = envelope.payload.branch_name.into();
2133 let remote_branch_name = envelope.payload.remote_branch_name.into();
2134 let remote_name = envelope.payload.remote_name.into();
2135
2136 let remote_output = repository_handle
2137 .update(&mut cx, |repository_handle, cx| {
2138 repository_handle.push(
2139 branch_name,
2140 remote_branch_name,
2141 remote_name,
2142 options,
2143 askpass,
2144 cx,
2145 )
2146 })
2147 .await??;
2148 Ok(proto::RemoteMessageResponse {
2149 stdout: remote_output.stdout,
2150 stderr: remote_output.stderr,
2151 })
2152 }
2153
2154 async fn handle_pull(
2155 this: Entity<Self>,
2156 envelope: TypedEnvelope<proto::Pull>,
2157 mut cx: AsyncApp,
2158 ) -> Result<proto::RemoteMessageResponse> {
2159 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2160 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2161 let askpass_id = envelope.payload.askpass_id;
2162 let askpass = make_remote_delegate(
2163 this,
2164 envelope.payload.project_id,
2165 repository_id,
2166 askpass_id,
2167 &mut cx,
2168 );
2169
2170 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2171 let remote_name = envelope.payload.remote_name.into();
2172 let rebase = envelope.payload.rebase;
2173
2174 let remote_message = repository_handle
2175 .update(&mut cx, |repository_handle, cx| {
2176 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2177 })
2178 .await??;
2179
2180 Ok(proto::RemoteMessageResponse {
2181 stdout: remote_message.stdout,
2182 stderr: remote_message.stderr,
2183 })
2184 }
2185
2186 async fn handle_stage(
2187 this: Entity<Self>,
2188 envelope: TypedEnvelope<proto::Stage>,
2189 mut cx: AsyncApp,
2190 ) -> Result<proto::Ack> {
2191 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2192 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2193
2194 let entries = envelope
2195 .payload
2196 .paths
2197 .into_iter()
2198 .map(|path| RepoPath::new(&path))
2199 .collect::<Result<Vec<_>>>()?;
2200
2201 repository_handle
2202 .update(&mut cx, |repository_handle, cx| {
2203 repository_handle.stage_entries(entries, cx)
2204 })
2205 .await?;
2206 Ok(proto::Ack {})
2207 }
2208
2209 async fn handle_unstage(
2210 this: Entity<Self>,
2211 envelope: TypedEnvelope<proto::Unstage>,
2212 mut cx: AsyncApp,
2213 ) -> Result<proto::Ack> {
2214 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2215 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2216
2217 let entries = envelope
2218 .payload
2219 .paths
2220 .into_iter()
2221 .map(|path| RepoPath::new(&path))
2222 .collect::<Result<Vec<_>>>()?;
2223
2224 repository_handle
2225 .update(&mut cx, |repository_handle, cx| {
2226 repository_handle.unstage_entries(entries, cx)
2227 })
2228 .await?;
2229
2230 Ok(proto::Ack {})
2231 }
2232
2233 async fn handle_stash(
2234 this: Entity<Self>,
2235 envelope: TypedEnvelope<proto::Stash>,
2236 mut cx: AsyncApp,
2237 ) -> Result<proto::Ack> {
2238 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2239 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2240
2241 let entries = envelope
2242 .payload
2243 .paths
2244 .into_iter()
2245 .map(|path| RepoPath::new(&path))
2246 .collect::<Result<Vec<_>>>()?;
2247
2248 repository_handle
2249 .update(&mut cx, |repository_handle, cx| {
2250 repository_handle.stash_entries(entries, cx)
2251 })
2252 .await?;
2253
2254 Ok(proto::Ack {})
2255 }
2256
2257 async fn handle_stash_pop(
2258 this: Entity<Self>,
2259 envelope: TypedEnvelope<proto::StashPop>,
2260 mut cx: AsyncApp,
2261 ) -> Result<proto::Ack> {
2262 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2263 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2264 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2265
2266 repository_handle
2267 .update(&mut cx, |repository_handle, cx| {
2268 repository_handle.stash_pop(stash_index, cx)
2269 })
2270 .await?;
2271
2272 Ok(proto::Ack {})
2273 }
2274
2275 async fn handle_stash_apply(
2276 this: Entity<Self>,
2277 envelope: TypedEnvelope<proto::StashApply>,
2278 mut cx: AsyncApp,
2279 ) -> Result<proto::Ack> {
2280 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2281 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2282 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2283
2284 repository_handle
2285 .update(&mut cx, |repository_handle, cx| {
2286 repository_handle.stash_apply(stash_index, cx)
2287 })
2288 .await?;
2289
2290 Ok(proto::Ack {})
2291 }
2292
2293 async fn handle_stash_drop(
2294 this: Entity<Self>,
2295 envelope: TypedEnvelope<proto::StashDrop>,
2296 mut cx: AsyncApp,
2297 ) -> Result<proto::Ack> {
2298 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2299 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2300 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2301
2302 repository_handle
2303 .update(&mut cx, |repository_handle, cx| {
2304 repository_handle.stash_drop(stash_index, cx)
2305 })
2306 .await??;
2307
2308 Ok(proto::Ack {})
2309 }
2310
2311 async fn handle_set_index_text(
2312 this: Entity<Self>,
2313 envelope: TypedEnvelope<proto::SetIndexText>,
2314 mut cx: AsyncApp,
2315 ) -> Result<proto::Ack> {
2316 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2317 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2318 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2319
2320 repository_handle
2321 .update(&mut cx, |repository_handle, cx| {
2322 repository_handle.spawn_set_index_text_job(
2323 repo_path,
2324 envelope.payload.text,
2325 None,
2326 cx,
2327 )
2328 })
2329 .await??;
2330 Ok(proto::Ack {})
2331 }
2332
2333 async fn handle_run_hook(
2334 this: Entity<Self>,
2335 envelope: TypedEnvelope<proto::RunGitHook>,
2336 mut cx: AsyncApp,
2337 ) -> Result<proto::Ack> {
2338 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2339 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2340 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2341 repository_handle
2342 .update(&mut cx, |repository_handle, cx| {
2343 repository_handle.run_hook(hook, cx)
2344 })
2345 .await??;
2346 Ok(proto::Ack {})
2347 }
2348
2349 async fn handle_commit(
2350 this: Entity<Self>,
2351 envelope: TypedEnvelope<proto::Commit>,
2352 mut cx: AsyncApp,
2353 ) -> Result<proto::Ack> {
2354 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2355 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2356 let askpass_id = envelope.payload.askpass_id;
2357
2358 let askpass = make_remote_delegate(
2359 this,
2360 envelope.payload.project_id,
2361 repository_id,
2362 askpass_id,
2363 &mut cx,
2364 );
2365
2366 let message = SharedString::from(envelope.payload.message);
2367 let name = envelope.payload.name.map(SharedString::from);
2368 let email = envelope.payload.email.map(SharedString::from);
2369 let options = envelope.payload.options.unwrap_or_default();
2370
2371 repository_handle
2372 .update(&mut cx, |repository_handle, cx| {
2373 repository_handle.commit(
2374 message,
2375 name.zip(email),
2376 CommitOptions {
2377 amend: options.amend,
2378 signoff: options.signoff,
2379 allow_empty: options.allow_empty,
2380 },
2381 askpass,
2382 cx,
2383 )
2384 })
2385 .await??;
2386 Ok(proto::Ack {})
2387 }
2388
2389 async fn handle_get_remotes(
2390 this: Entity<Self>,
2391 envelope: TypedEnvelope<proto::GetRemotes>,
2392 mut cx: AsyncApp,
2393 ) -> Result<proto::GetRemotesResponse> {
2394 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2395 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2396
2397 let branch_name = envelope.payload.branch_name;
2398 let is_push = envelope.payload.is_push;
2399
2400 let remotes = repository_handle
2401 .update(&mut cx, |repository_handle, _| {
2402 repository_handle.get_remotes(branch_name, is_push)
2403 })
2404 .await??;
2405
2406 Ok(proto::GetRemotesResponse {
2407 remotes: remotes
2408 .into_iter()
2409 .map(|remotes| proto::get_remotes_response::Remote {
2410 name: remotes.name.to_string(),
2411 })
2412 .collect::<Vec<_>>(),
2413 })
2414 }
2415
2416 async fn handle_get_worktrees(
2417 this: Entity<Self>,
2418 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2419 mut cx: AsyncApp,
2420 ) -> Result<proto::GitWorktreesResponse> {
2421 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2422 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2423
2424 let worktrees = repository_handle
2425 .update(&mut cx, |repository_handle, _| {
2426 repository_handle.worktrees()
2427 })
2428 .await??;
2429
2430 Ok(proto::GitWorktreesResponse {
2431 worktrees: worktrees
2432 .into_iter()
2433 .map(|worktree| worktree_to_proto(&worktree))
2434 .collect::<Vec<_>>(),
2435 })
2436 }
2437
2438 async fn handle_create_worktree(
2439 this: Entity<Self>,
2440 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2441 mut cx: AsyncApp,
2442 ) -> Result<proto::Ack> {
2443 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2444 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2445 let directory = PathBuf::from(envelope.payload.directory);
2446 let name = envelope.payload.name;
2447 let commit = envelope.payload.commit;
2448 let use_existing_branch = envelope.payload.use_existing_branch;
2449 let target = if name.is_empty() {
2450 CreateWorktreeTarget::Detached { base_sha: commit }
2451 } else if use_existing_branch {
2452 CreateWorktreeTarget::ExistingBranch { branch_name: name }
2453 } else {
2454 CreateWorktreeTarget::NewBranch {
2455 branch_name: name,
2456 base_sha: commit,
2457 }
2458 };
2459
2460 repository_handle
2461 .update(&mut cx, |repository_handle, _| {
2462 repository_handle.create_worktree(target, directory)
2463 })
2464 .await??;
2465
2466 Ok(proto::Ack {})
2467 }
2468
2469 async fn handle_remove_worktree(
2470 this: Entity<Self>,
2471 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2472 mut cx: AsyncApp,
2473 ) -> Result<proto::Ack> {
2474 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2475 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2476 let path = PathBuf::from(envelope.payload.path);
2477 let force = envelope.payload.force;
2478
2479 repository_handle
2480 .update(&mut cx, |repository_handle, _| {
2481 repository_handle.remove_worktree(path, force)
2482 })
2483 .await??;
2484
2485 Ok(proto::Ack {})
2486 }
2487
2488 async fn handle_rename_worktree(
2489 this: Entity<Self>,
2490 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2491 mut cx: AsyncApp,
2492 ) -> Result<proto::Ack> {
2493 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2494 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2495 let old_path = PathBuf::from(envelope.payload.old_path);
2496 let new_path = PathBuf::from(envelope.payload.new_path);
2497
2498 repository_handle
2499 .update(&mut cx, |repository_handle, _| {
2500 repository_handle.rename_worktree(old_path, new_path)
2501 })
2502 .await??;
2503
2504 Ok(proto::Ack {})
2505 }
2506
2507 async fn handle_get_head_sha(
2508 this: Entity<Self>,
2509 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2510 mut cx: AsyncApp,
2511 ) -> Result<proto::GitGetHeadShaResponse> {
2512 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2513 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2514
2515 let head_sha = repository_handle
2516 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2517 .await??;
2518
2519 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2520 }
2521
2522 async fn handle_get_branches(
2523 this: Entity<Self>,
2524 envelope: TypedEnvelope<proto::GitGetBranches>,
2525 mut cx: AsyncApp,
2526 ) -> Result<proto::GitBranchesResponse> {
2527 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2528 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2529
2530 let branches = repository_handle
2531 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2532 .await??;
2533
2534 Ok(proto::GitBranchesResponse {
2535 branches: branches
2536 .into_iter()
2537 .map(|branch| branch_to_proto(&branch))
2538 .collect::<Vec<_>>(),
2539 })
2540 }
2541 async fn handle_get_default_branch(
2542 this: Entity<Self>,
2543 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2544 mut cx: AsyncApp,
2545 ) -> Result<proto::GetDefaultBranchResponse> {
2546 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2547 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2548
2549 let branch = repository_handle
2550 .update(&mut cx, |repository_handle, _| {
2551 repository_handle.default_branch(false)
2552 })
2553 .await??
2554 .map(Into::into);
2555
2556 Ok(proto::GetDefaultBranchResponse { branch })
2557 }
2558 async fn handle_create_branch(
2559 this: Entity<Self>,
2560 envelope: TypedEnvelope<proto::GitCreateBranch>,
2561 mut cx: AsyncApp,
2562 ) -> Result<proto::Ack> {
2563 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2564 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2565 let branch_name = envelope.payload.branch_name;
2566
2567 repository_handle
2568 .update(&mut cx, |repository_handle, _| {
2569 repository_handle.create_branch(branch_name, None)
2570 })
2571 .await??;
2572
2573 Ok(proto::Ack {})
2574 }
2575
2576 async fn handle_change_branch(
2577 this: Entity<Self>,
2578 envelope: TypedEnvelope<proto::GitChangeBranch>,
2579 mut cx: AsyncApp,
2580 ) -> Result<proto::Ack> {
2581 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2582 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2583 let branch_name = envelope.payload.branch_name;
2584
2585 repository_handle
2586 .update(&mut cx, |repository_handle, _| {
2587 repository_handle.change_branch(branch_name)
2588 })
2589 .await??;
2590
2591 Ok(proto::Ack {})
2592 }
2593
2594 async fn handle_rename_branch(
2595 this: Entity<Self>,
2596 envelope: TypedEnvelope<proto::GitRenameBranch>,
2597 mut cx: AsyncApp,
2598 ) -> Result<proto::Ack> {
2599 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2600 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2601 let branch = envelope.payload.branch;
2602 let new_name = envelope.payload.new_name;
2603
2604 repository_handle
2605 .update(&mut cx, |repository_handle, _| {
2606 repository_handle.rename_branch(branch, new_name)
2607 })
2608 .await??;
2609
2610 Ok(proto::Ack {})
2611 }
2612
2613 async fn handle_create_remote(
2614 this: Entity<Self>,
2615 envelope: TypedEnvelope<proto::GitCreateRemote>,
2616 mut cx: AsyncApp,
2617 ) -> Result<proto::Ack> {
2618 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2619 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2620 let remote_name = envelope.payload.remote_name;
2621 let remote_url = envelope.payload.remote_url;
2622
2623 repository_handle
2624 .update(&mut cx, |repository_handle, _| {
2625 repository_handle.create_remote(remote_name, remote_url)
2626 })
2627 .await??;
2628
2629 Ok(proto::Ack {})
2630 }
2631
2632 async fn handle_delete_branch(
2633 this: Entity<Self>,
2634 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2635 mut cx: AsyncApp,
2636 ) -> Result<proto::Ack> {
2637 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2638 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2639 let is_remote = envelope.payload.is_remote;
2640 let branch_name = envelope.payload.branch_name;
2641
2642 repository_handle
2643 .update(&mut cx, |repository_handle, _| {
2644 repository_handle.delete_branch(is_remote, branch_name)
2645 })
2646 .await??;
2647
2648 Ok(proto::Ack {})
2649 }
2650
2651 async fn handle_remove_remote(
2652 this: Entity<Self>,
2653 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2654 mut cx: AsyncApp,
2655 ) -> Result<proto::Ack> {
2656 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2657 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2658 let remote_name = envelope.payload.remote_name;
2659
2660 repository_handle
2661 .update(&mut cx, |repository_handle, _| {
2662 repository_handle.remove_remote(remote_name)
2663 })
2664 .await??;
2665
2666 Ok(proto::Ack {})
2667 }
2668
2669 async fn handle_show(
2670 this: Entity<Self>,
2671 envelope: TypedEnvelope<proto::GitShow>,
2672 mut cx: AsyncApp,
2673 ) -> Result<proto::GitCommitDetails> {
2674 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2675 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2676
2677 let commit = repository_handle
2678 .update(&mut cx, |repository_handle, _| {
2679 repository_handle.show(envelope.payload.commit)
2680 })
2681 .await??;
2682 Ok(proto::GitCommitDetails {
2683 sha: commit.sha.into(),
2684 message: commit.message.into(),
2685 commit_timestamp: commit.commit_timestamp,
2686 author_email: commit.author_email.into(),
2687 author_name: commit.author_name.into(),
2688 })
2689 }
2690
2691 async fn handle_create_checkpoint(
2692 this: Entity<Self>,
2693 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2694 mut cx: AsyncApp,
2695 ) -> Result<proto::GitCreateCheckpointResponse> {
2696 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2697 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2698
2699 let checkpoint = repository_handle
2700 .update(&mut cx, |repository, _| repository.checkpoint())
2701 .await??;
2702
2703 Ok(proto::GitCreateCheckpointResponse {
2704 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2705 })
2706 }
2707
2708 async fn handle_restore_checkpoint(
2709 this: Entity<Self>,
2710 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2711 mut cx: AsyncApp,
2712 ) -> Result<proto::Ack> {
2713 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2714 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2715
2716 let checkpoint = GitRepositoryCheckpoint {
2717 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2718 };
2719
2720 repository_handle
2721 .update(&mut cx, |repository, _| {
2722 repository.restore_checkpoint(checkpoint)
2723 })
2724 .await??;
2725
2726 Ok(proto::Ack {})
2727 }
2728
2729 async fn handle_compare_checkpoints(
2730 this: Entity<Self>,
2731 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2732 mut cx: AsyncApp,
2733 ) -> Result<proto::GitCompareCheckpointsResponse> {
2734 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2735 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2736
2737 let left = GitRepositoryCheckpoint {
2738 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2739 };
2740 let right = GitRepositoryCheckpoint {
2741 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2742 };
2743
2744 let equal = repository_handle
2745 .update(&mut cx, |repository, _| {
2746 repository.compare_checkpoints(left, right)
2747 })
2748 .await??;
2749
2750 Ok(proto::GitCompareCheckpointsResponse { equal })
2751 }
2752
2753 async fn handle_diff_checkpoints(
2754 this: Entity<Self>,
2755 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2756 mut cx: AsyncApp,
2757 ) -> Result<proto::GitDiffCheckpointsResponse> {
2758 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2759 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2760
2761 let base = GitRepositoryCheckpoint {
2762 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2763 };
2764 let target = GitRepositoryCheckpoint {
2765 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2766 };
2767
2768 let diff = repository_handle
2769 .update(&mut cx, |repository, _| {
2770 repository.diff_checkpoints(base, target)
2771 })
2772 .await??;
2773
2774 Ok(proto::GitDiffCheckpointsResponse { diff })
2775 }
2776
2777 async fn handle_load_commit_diff(
2778 this: Entity<Self>,
2779 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2780 mut cx: AsyncApp,
2781 ) -> Result<proto::LoadCommitDiffResponse> {
2782 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2783 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2784
2785 let commit_diff = repository_handle
2786 .update(&mut cx, |repository_handle, _| {
2787 repository_handle.load_commit_diff(envelope.payload.commit)
2788 })
2789 .await??;
2790 Ok(proto::LoadCommitDiffResponse {
2791 files: commit_diff
2792 .files
2793 .into_iter()
2794 .map(|file| proto::CommitFile {
2795 path: file.path.to_proto(),
2796 old_text: file.old_text,
2797 new_text: file.new_text,
2798 is_binary: file.is_binary,
2799 })
2800 .collect(),
2801 })
2802 }
2803
2804 async fn handle_file_history(
2805 this: Entity<Self>,
2806 envelope: TypedEnvelope<proto::GitFileHistory>,
2807 mut cx: AsyncApp,
2808 ) -> Result<proto::GitFileHistoryResponse> {
2809 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2810 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2811 let path = RepoPath::from_proto(&envelope.payload.path)?;
2812 let skip = envelope.payload.skip as usize;
2813 let limit = envelope.payload.limit.map(|l| l as usize);
2814
2815 let file_history = repository_handle
2816 .update(&mut cx, |repository_handle, _| {
2817 repository_handle.file_history_paginated(path, skip, limit)
2818 })
2819 .await??;
2820
2821 Ok(proto::GitFileHistoryResponse {
2822 entries: file_history
2823 .entries
2824 .into_iter()
2825 .map(|entry| proto::FileHistoryEntry {
2826 sha: entry.sha.to_string(),
2827 subject: entry.subject.to_string(),
2828 message: entry.message.to_string(),
2829 commit_timestamp: entry.commit_timestamp,
2830 author_name: entry.author_name.to_string(),
2831 author_email: entry.author_email.to_string(),
2832 })
2833 .collect(),
2834 path: file_history.path.to_proto(),
2835 })
2836 }
2837
2838 async fn handle_reset(
2839 this: Entity<Self>,
2840 envelope: TypedEnvelope<proto::GitReset>,
2841 mut cx: AsyncApp,
2842 ) -> Result<proto::Ack> {
2843 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2844 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2845
2846 let mode = match envelope.payload.mode() {
2847 git_reset::ResetMode::Soft => ResetMode::Soft,
2848 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2849 };
2850
2851 repository_handle
2852 .update(&mut cx, |repository_handle, cx| {
2853 repository_handle.reset(envelope.payload.commit, mode, cx)
2854 })
2855 .await??;
2856 Ok(proto::Ack {})
2857 }
2858
2859 async fn handle_checkout_files(
2860 this: Entity<Self>,
2861 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2862 mut cx: AsyncApp,
2863 ) -> Result<proto::Ack> {
2864 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2865 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2866 let paths = envelope
2867 .payload
2868 .paths
2869 .iter()
2870 .map(|s| RepoPath::from_proto(s))
2871 .collect::<Result<Vec<_>>>()?;
2872
2873 repository_handle
2874 .update(&mut cx, |repository_handle, cx| {
2875 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2876 })
2877 .await?;
2878 Ok(proto::Ack {})
2879 }
2880
    /// Opens the repository's commit-message buffer on behalf of a remote peer
    /// and registers it with the buffer store so the peer can edit it.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Replicate the buffer to the requesting peer — use the original
        // sender when the request was forwarded through an intermediary.
        // NOTE(review): the value returned by `this.update` is discarded here,
        // unlike most other handlers in this file — confirm that is intentional.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2911
    /// Answers a credential prompt for an in-flight remote git operation by
    /// forwarding it to the askpass delegate registered under `askpass_id`.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the shared map so the mutex is NOT held
        // while we await the user's answer below.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        // `None` means the prompt was dismissed/cancelled by the user.
        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so the same operation can prompt again
        // (e.g. username first, then password).
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2940
2941 async fn handle_check_for_pushed_commits(
2942 this: Entity<Self>,
2943 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2944 mut cx: AsyncApp,
2945 ) -> Result<proto::CheckForPushedCommitsResponse> {
2946 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2947 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2948
2949 let branches = repository_handle
2950 .update(&mut cx, |repository_handle, _| {
2951 repository_handle.check_for_pushed_commits()
2952 })
2953 .await??;
2954 Ok(proto::CheckForPushedCommitsResponse {
2955 pushed_to: branches
2956 .into_iter()
2957 .map(|commit| commit.to_string())
2958 .collect(),
2959 })
2960 }
2961
2962 async fn handle_git_diff(
2963 this: Entity<Self>,
2964 envelope: TypedEnvelope<proto::GitDiff>,
2965 mut cx: AsyncApp,
2966 ) -> Result<proto::GitDiffResponse> {
2967 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2968 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2969 let diff_type = match envelope.payload.diff_type() {
2970 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2971 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2972 proto::git_diff::DiffType::MergeBase => {
2973 let base_ref = envelope
2974 .payload
2975 .merge_base_ref
2976 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2977 DiffType::MergeBase {
2978 base_ref: base_ref.into(),
2979 }
2980 }
2981 };
2982
2983 let mut diff = repository_handle
2984 .update(&mut cx, |repository_handle, cx| {
2985 repository_handle.diff(diff_type, cx)
2986 })
2987 .await??;
2988 const ONE_MB: usize = 1_000_000;
2989 if diff.len() > ONE_MB {
2990 diff = diff.chars().take(ONE_MB).collect()
2991 }
2992
2993 Ok(proto::GitDiffResponse { diff })
2994 }
2995
2996 async fn handle_tree_diff(
2997 this: Entity<Self>,
2998 request: TypedEnvelope<proto::GetTreeDiff>,
2999 mut cx: AsyncApp,
3000 ) -> Result<proto::GetTreeDiffResponse> {
3001 let repository_id = RepositoryId(request.payload.repository_id);
3002 let diff_type = if request.payload.is_merge {
3003 DiffTreeType::MergeBase {
3004 base: request.payload.base.into(),
3005 head: request.payload.head.into(),
3006 }
3007 } else {
3008 DiffTreeType::Since {
3009 base: request.payload.base.into(),
3010 head: request.payload.head.into(),
3011 }
3012 };
3013
3014 let diff = this
3015 .update(&mut cx, |this, cx| {
3016 let repository = this.repositories().get(&repository_id)?;
3017 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
3018 })
3019 .context("missing repository")?
3020 .await??;
3021
3022 Ok(proto::GetTreeDiffResponse {
3023 entries: diff
3024 .entries
3025 .into_iter()
3026 .map(|(path, status)| proto::TreeDiffStatus {
3027 path: path.as_ref().to_proto(),
3028 status: match status {
3029 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
3030 TreeDiffStatus::Modified { .. } => {
3031 proto::tree_diff_status::Status::Modified.into()
3032 }
3033 TreeDiffStatus::Deleted { .. } => {
3034 proto::tree_diff_status::Status::Deleted.into()
3035 }
3036 },
3037 oid: match status {
3038 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
3039 Some(old.to_string())
3040 }
3041 TreeDiffStatus::Added => None,
3042 },
3043 })
3044 .collect(),
3045 })
3046 }
3047
3048 async fn handle_get_blob_content(
3049 this: Entity<Self>,
3050 request: TypedEnvelope<proto::GetBlobContent>,
3051 mut cx: AsyncApp,
3052 ) -> Result<proto::GetBlobContentResponse> {
3053 let oid = git::Oid::from_str(&request.payload.oid)?;
3054 let repository_id = RepositoryId(request.payload.repository_id);
3055 let content = this
3056 .update(&mut cx, |this, cx| {
3057 let repository = this.repositories().get(&repository_id)?;
3058 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
3059 })
3060 .context("missing repository")?
3061 .await?;
3062 Ok(proto::GetBlobContentResponse { content })
3063 }
3064
3065 async fn handle_open_unstaged_diff(
3066 this: Entity<Self>,
3067 request: TypedEnvelope<proto::OpenUnstagedDiff>,
3068 mut cx: AsyncApp,
3069 ) -> Result<proto::OpenUnstagedDiffResponse> {
3070 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3071 let diff = this
3072 .update(&mut cx, |this, cx| {
3073 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
3074 Some(this.open_unstaged_diff(buffer, cx))
3075 })
3076 .context("missing buffer")?
3077 .await?;
3078 this.update(&mut cx, |this, _| {
3079 let shared_diffs = this
3080 .shared_diffs
3081 .entry(request.original_sender_id.unwrap_or(request.sender_id))
3082 .or_default();
3083 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
3084 });
3085 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
3086 Ok(proto::OpenUnstagedDiffResponse { staged_text })
3087 }
3088
    /// RPC handler: opens (or reuses) the uncommitted diff for the requested
    /// buffer, retains it in `shared_diffs` for the requesting peer, and
    /// replies with the base texts the remote side needs to mirror the diff.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Keep a strong handle on behalf of the requesting peer so the diff
        // outlives this request.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // Index base text, if the secondary (unstaged) diff has one.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            // Decide how much base text must cross the wire: when the index
            // and HEAD base texts are the same snapshot (same remote id), one
            // copy suffices and the mode tells the peer to reuse it.
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No HEAD text (e.g. file not committed): send only the index
                // text, if any.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3149
3150 async fn handle_update_diff_bases(
3151 this: Entity<Self>,
3152 request: TypedEnvelope<proto::UpdateDiffBases>,
3153 mut cx: AsyncApp,
3154 ) -> Result<()> {
3155 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3156 this.update(&mut cx, |this, cx| {
3157 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
3158 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
3159 {
3160 let buffer = buffer.read(cx).text_snapshot();
3161 diff_state.update(cx, |diff_state, cx| {
3162 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
3163 })
3164 }
3165 });
3166 Ok(())
3167 }
3168
3169 async fn handle_blame_buffer(
3170 this: Entity<Self>,
3171 envelope: TypedEnvelope<proto::BlameBuffer>,
3172 mut cx: AsyncApp,
3173 ) -> Result<proto::BlameBufferResponse> {
3174 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3175 let version = deserialize_version(&envelope.payload.version);
3176 let buffer = this.read_with(&cx, |this, cx| {
3177 this.buffer_store.read(cx).get_existing(buffer_id)
3178 })?;
3179 buffer
3180 .update(&mut cx, |buffer, _| {
3181 buffer.wait_for_version(version.clone())
3182 })
3183 .await?;
3184 let blame = this
3185 .update(&mut cx, |this, cx| {
3186 this.blame_buffer(&buffer, Some(version), cx)
3187 })
3188 .await?;
3189 Ok(serialize_blame_buffer_response(blame))
3190 }
3191
3192 async fn handle_get_permalink_to_line(
3193 this: Entity<Self>,
3194 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3195 mut cx: AsyncApp,
3196 ) -> Result<proto::GetPermalinkToLineResponse> {
3197 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3198 // let version = deserialize_version(&envelope.payload.version);
3199 let selection = {
3200 let proto_selection = envelope
3201 .payload
3202 .selection
3203 .context("no selection to get permalink for defined")?;
3204 proto_selection.start as u32..proto_selection.end as u32
3205 };
3206 let buffer = this.read_with(&cx, |this, cx| {
3207 this.buffer_store.read(cx).get_existing(buffer_id)
3208 })?;
3209 let permalink = this
3210 .update(&mut cx, |this, cx| {
3211 this.get_permalink_to_line(&buffer, selection, cx)
3212 })
3213 .await?;
3214 Ok(proto::GetPermalinkToLineResponse {
3215 permalink: permalink.to_string(),
3216 })
3217 }
3218
3219 fn repository_for_request(
3220 this: &Entity<Self>,
3221 id: RepositoryId,
3222 cx: &mut AsyncApp,
3223 ) -> Result<Entity<Repository>> {
3224 this.read_with(cx, |this, _| {
3225 this.repositories
3226 .get(&id)
3227 .context("missing repository handle")
3228 .cloned()
3229 })
3230 }
3231
3232 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3233 self.repositories
3234 .iter()
3235 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3236 .collect()
3237 }
3238
    /// Maps a batch of updated worktree entries to the repositories that
    /// contain them, producing each repository's affected `RepoPath`s.
    ///
    /// Paths are assigned to their innermost containing repository, so an
    /// entry inside a nested repository is attributed to the nested one only.
    /// The mapping runs on the background executor.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize up front so the comparison against repository
        // work-directory paths can happen off the main thread.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sort repos by work directory; after `.rev()` below, nested
            // (more specific) repositories are visited first.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3318}
3319
impl BufferGitState {
    /// Creates an empty per-buffer git state with no diffs or base texts.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }

    /// Records the buffer's new language and re-runs diff recalculation so the
    /// diffs are rebuilt with the updated language.
    #[ztracing::instrument(skip_all)]
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Re-parses conflict markers in the given buffer snapshot and updates the
    /// associated `ConflictSet`, if one is still alive.
    ///
    /// The returned receiver fires once the conflict set has been updated; it
    /// is dropped unfired when there is no live conflict set or no prior
    /// conflict snapshot to compare against.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when conflicts were previously present; the old
        // snapshot is needed to compute the changed range.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff on the background executor.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake everyone who was waiting on a conflict reparse.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }

    /// The live unstaged diff for this buffer, if a handle is still held.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// The live uncommitted diff for this buffer, if a handle is still held.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// The live diff against the given commit oid key, if a handle is still
    /// held.
    fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
        self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
    }

    /// Applies base texts received over RPC and triggers a diff
    /// recalculation. Messages with an unrecognized mode are ignored.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }

    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves once it finishes (or is abandoned); otherwise returns `None`.
    pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
        if *self.recalculating_tx.borrow() {
            let mut rx = self.recalculating_tx.subscribe();
            Some(async move {
                loop {
                    let is_recalculating = rx.recv().await;
                    // Stop on `Some(false)` (done) or `None` (channel closed).
                    if is_recalculating != Some(true) {
                        break;
                    }
                }
            })
        } else {
            None
        }
    }

    /// Stores newly-loaded base texts (index and/or HEAD), normalizing their
    /// line endings, and kicks off a diff recalculation.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: Option<DiffBasesChange>,
        cx: &mut Context<Self>,
    ) {
        match diff_bases_change {
            Some(DiffBasesChange::SetIndex(index)) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetHead(head)) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            Some(DiffBasesChange::SetBoth(text)) => {
                // Index and HEAD share one Arc; `recalculate_diffs` detects
                // the "index matches head" case via `Arc::ptr_eq`.
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::from(text.as_str())
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetEach { index, head }) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            None => {}
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged, uncommitted, and per-oid diffs for the buffer
    /// against the currently stored base texts.
    ///
    /// Runs asynchronously in `recalculate_diff_task`; `recalculating_tx`
    /// broadcasts `true` while the work is in flight. The attempt is
    /// abandoned if new hunk staging operations arrive in the meantime.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality suffices: `diff_bases_changed` stores the same Arc
        // in both fields when index and HEAD coincide.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop dead oid diffs and their cached base texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When the index matches HEAD, the unstaged diff doubles as
                // the uncommitted diff — no need to compute it twice.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Refresh each surviving oid diff, yielding between each one.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot(new_oid_diff, &buffer, cx)
                    })
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and signal that recalculation is done.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
}
3704
/// Builds an askpass delegate that forwards credential prompts from a local
/// git operation to the downstream client over RPC and relays the (encrypted)
/// response back to the waiting git process.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // Without a downstream client there is nowhere to forward the
            // prompt; drop it (the sender `tx` is dropped unanswered).
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext response from memory after encrypting it.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3734
3735impl RepositoryId {
3736 pub fn to_proto(self) -> u64 {
3737 self.0
3738 }
3739
3740 pub fn from_proto(id: u64) -> Self {
3741 RepositoryId(id)
3742 }
3743}
3744
impl RepositorySnapshot {
    /// Creates a snapshot with no statuses, branches, or other repository
    /// data — the state before the first scan completes.
    fn empty(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
    ) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            // With no separate original path, this repository is its own main
            // worktree (see `is_main_worktree`).
            original_repo_abs_path: original_repo_abs_path
                .unwrap_or_else(|| work_directory_abs_path.clone()),
            work_directory_abs_path,
            branch: None,
            branch_list: Arc::from([]),
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
            linked_worktrees: Arc::from([]),
            path_style,
        }
    }

    /// Builds a full `UpdateRepository` message describing the entire
    /// snapshot, for a client that has no prior state.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// Builds an incremental `UpdateRepository` message containing only the
    /// status entries that were added, changed, or removed relative to `old`.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        // Merge-walk the two path-ordered status lists to diff them in one
        // pass.
        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Path exists only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Present in both: report only if it changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Path exists only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    pub fn is_main_worktree(&self) -> bool {
        self.work_directory_abs_path == self.original_repo_abs_path
    }

    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }

    /// The git worktrees linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }

    /// Iterates over all file status entries, in path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// The aggregated git status summary across all paths.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// The status entry for a single repository-relative path, if any.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// The diff statistics for a single repository-relative path, if known.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }

    /// Converts an absolute path to a path relative to this repository's work
    /// directory, or `None` if the path lies outside it.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Converts a repository-relative path back to an absolute path.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }

    /// Static helper for `abs_path_to_repo_path`, usable off the main thread
    /// without a full snapshot (see `process_updated_entries`).
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }

    /// Whether this path was recorded as conflicted the last time the merge
    /// heads changed.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }

    /// Whether this path is conflicted now, or was conflicted when the merge
    /// heads last changed.
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change = self
            .merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
3985
3986pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3987 proto::StashEntry {
3988 oid: entry.oid.as_bytes().to_vec(),
3989 message: entry.message.clone(),
3990 branch: entry.branch.clone(),
3991 index: entry.index as u64,
3992 timestamp: entry.timestamp,
3993 }
3994}
3995
3996pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3997 Ok(StashEntry {
3998 oid: Oid::from_bytes(&entry.oid)?,
3999 message: entry.message.clone(),
4000 index: entry.index as usize,
4001 branch: entry.branch.clone(),
4002 timestamp: entry.timestamp,
4003 })
4004}
4005
4006impl MergeDetails {
4007 async fn update(
4008 &mut self,
4009 backend: &Arc<dyn GitRepository>,
4010 current_conflicted_paths: Vec<RepoPath>,
4011 ) -> Result<bool> {
4012 log::debug!("load merge details");
4013 self.message = backend.merge_message().await.map(SharedString::from);
4014 let heads = backend
4015 .revparse_batch(vec![
4016 "MERGE_HEAD".into(),
4017 "CHERRY_PICK_HEAD".into(),
4018 "REBASE_HEAD".into(),
4019 "REVERT_HEAD".into(),
4020 "APPLY_HEAD".into(),
4021 ])
4022 .await
4023 .log_err()
4024 .unwrap_or_default()
4025 .into_iter()
4026 .map(|opt| opt.map(SharedString::from))
4027 .collect::<Vec<_>>();
4028
4029 let mut conflicts_changed = false;
4030
4031 // Record the merge state for newly conflicted paths
4032 for path in ¤t_conflicted_paths {
4033 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
4034 conflicts_changed = true;
4035 self.merge_heads_by_conflicted_path
4036 .insert(path.clone(), heads.clone());
4037 }
4038 }
4039
4040 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
4041 self.merge_heads_by_conflicted_path
4042 .retain(|path, old_merge_heads| {
4043 let keep = current_conflicted_paths.contains(path)
4044 || (old_merge_heads == &heads
4045 && old_merge_heads.iter().any(|head| head.is_some()));
4046 if !keep {
4047 conflicts_changed = true;
4048 }
4049 keep
4050 });
4051
4052 Ok(conflicts_changed)
4053 }
4054}
4055
4056impl Repository {
4057 pub fn is_trusted(&self) -> bool {
4058 match self.repository_state.peek() {
4059 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
4060 _ => false,
4061 }
4062 }
4063
4064 pub fn snapshot(&self) -> RepositorySnapshot {
4065 self.snapshot.clone()
4066 }
4067
    /// Iterates over all pending git operations, yielding owned copies.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
4071
    /// The aggregated summary of all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
4075
4076 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
4077 self.pending_ops
4078 .get(&PathKey(path.as_ref().clone()), ())
4079 .cloned()
4080 }
4081
    /// Creates a `Repository` backed by a local git repository on disk.
    ///
    /// The backend is opened asynchronously: `repository_state` is a shared
    /// future that resolves once `LocalRepositoryState::new` completes (or
    /// fails), and a local git worker is spawned to drain queued git jobs.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // The error is stored as a `String` so every waiter of the shared
        // future can clone the result.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Drop cached commit-graph data when the head, branch list, or stash
        // changes after the first scan (`scan_id > 1`).
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4155
    /// Constructs a `Repository` that proxies a repository on a remote peer,
    /// forwarding git operations over `client` using `project_id`.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike the local case there is nothing to initialize asynchronously,
        // so the shared state future resolves immediately.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4193
4194 pub fn git_store(&self) -> Option<Entity<GitStore>> {
4195 self.git_store.upgrade()
4196 }
4197
    /// Re-reads the index and HEAD texts backing every open buffer's diff and
    /// applies the minimal `DiffBasesChange` for each buffer.
    ///
    /// Keyed on `GitJobKey::ReloadBufferDiffBases` so repeated requests
    /// coalesce in the git worker. Only meaningful for local repositories;
    /// a non-local state logs an error and returns.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (foreground): collect, for each open buffer in this
                // repo, its repo path plus the current index/head base texts —
                // but only for diffs that are actually in use (upgradable).
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Phase 2 (background): load fresh index/HEAD texts from git
                // and diff them against the captured values, producing the
                // smallest change variant (index only, head only, each, both).
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            // Skip loads for bases no diff is tracking.
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // When index and HEAD agree, a single
                                            // `SetBoth` avoids storing the text twice.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (foreground): apply each change to its diff state and
                // mirror it to any downstream collaborator via proto.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4357
    /// Enqueues `job` on this repository's serialized git worker with no
    /// deduplication key.
    ///
    /// `status`, when provided, is recorded in `active_jobs` while the job
    /// runs. The returned receiver yields the job's result, or `Canceled` if
    /// the worker is dropped first.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4370
    /// Enqueues `job` on the git worker, optionally tagged with a `key`
    /// (the key travels with the job in `GitJob`; coalescing of same-key
    /// jobs is handled by the worker, not here).
    ///
    /// When `status` is present, a `JobInfo` with the start time and message
    /// is inserted into `active_jobs` before the job runs and removed after,
    /// notifying observers on both transitions. The job's output is delivered
    /// through the returned oneshot receiver.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Monotonic per-repository id used to key `active_jobs`.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Always clear the job entry, even for status-less jobs
                        // (remove of an absent key is a no-op).
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // Receiver may have been dropped; ignore send failure.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4420
4421 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4422 let Some(git_store) = self.git_store.upgrade() else {
4423 return;
4424 };
4425 let entity = cx.entity();
4426 git_store.update(cx, |git_store, cx| {
4427 let Some((&id, _)) = git_store
4428 .repositories
4429 .iter()
4430 .find(|(_, handle)| *handle == &entity)
4431 else {
4432 return;
4433 };
4434 git_store.active_repo_id = Some(id);
4435 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4436 });
4437 }
4438
4439 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4440 self.snapshot.status()
4441 }
4442
4443 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4444 self.snapshot.diff_stat_for_path(path)
4445 }
4446
4447 pub fn cached_stash(&self) -> GitStash {
4448 self.snapshot.stash_entries.clone()
4449 }
4450
4451 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4452 let git_store = self.git_store.upgrade()?;
4453 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4454 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4455 let abs_path = SanitizedPath::new(&abs_path);
4456 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4457 Some(ProjectPath {
4458 worktree_id: worktree.read(cx).id(),
4459 path: relative_path,
4460 })
4461 }
4462
4463 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4464 let git_store = self.git_store.upgrade()?;
4465 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4466 let abs_path = worktree_store.absolutize(path, cx)?;
4467 self.snapshot.abs_path_to_repo_path(&abs_path)
4468 }
4469
4470 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4471 other
4472 .read(cx)
4473 .snapshot
4474 .work_directory_abs_path
4475 .starts_with(&self.snapshot.work_directory_abs_path)
4476 }
4477
4478 pub fn open_commit_buffer(
4479 &mut self,
4480 languages: Option<Arc<LanguageRegistry>>,
4481 buffer_store: Entity<BufferStore>,
4482 cx: &mut Context<Self>,
4483 ) -> Task<Result<Entity<Buffer>>> {
4484 let id = self.id;
4485 if let Some(buffer) = self.commit_message_buffer.clone() {
4486 return Task::ready(Ok(buffer));
4487 }
4488 let this = cx.weak_entity();
4489
4490 let rx = self.send_job(None, move |state, mut cx| async move {
4491 let Some(this) = this.upgrade() else {
4492 bail!("git store was dropped");
4493 };
4494 match state {
4495 RepositoryState::Local(..) => {
4496 this.update(&mut cx, |_, cx| {
4497 Self::open_local_commit_buffer(languages, buffer_store, cx)
4498 })
4499 .await
4500 }
4501 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4502 let request = client.request(proto::OpenCommitMessageBuffer {
4503 project_id: project_id.0,
4504 repository_id: id.to_proto(),
4505 });
4506 let response = request.await.context("requesting to open commit buffer")?;
4507 let buffer_id = BufferId::new(response.buffer_id)?;
4508 let buffer = buffer_store
4509 .update(&mut cx, |buffer_store, cx| {
4510 buffer_store.wait_for_remote_buffer(buffer_id, cx)
4511 })
4512 .await?;
4513 if let Some(language_registry) = languages {
4514 let git_commit_language =
4515 language_registry.language_for_name("Git Commit").await?;
4516 buffer.update(&mut cx, |buffer, cx| {
4517 buffer.set_language(Some(git_commit_language), cx);
4518 });
4519 }
4520 this.update(&mut cx, |this, _| {
4521 this.commit_message_buffer = Some(buffer.clone());
4522 });
4523 Ok(buffer)
4524 }
4525 }
4526 });
4527
4528 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
4529 }
4530
4531 fn open_local_commit_buffer(
4532 language_registry: Option<Arc<LanguageRegistry>>,
4533 buffer_store: Entity<BufferStore>,
4534 cx: &mut Context<Self>,
4535 ) -> Task<Result<Entity<Buffer>>> {
4536 cx.spawn(async move |repository, cx| {
4537 let git_commit_language = match language_registry {
4538 Some(language_registry) => {
4539 Some(language_registry.language_for_name("Git Commit").await?)
4540 }
4541 None => None,
4542 };
4543 let buffer = buffer_store
4544 .update(cx, |buffer_store, cx| {
4545 buffer_store.create_buffer(git_commit_language, false, cx)
4546 })
4547 .await?;
4548
4549 repository.update(cx, |repository, _| {
4550 repository.commit_message_buffer = Some(buffer.clone());
4551 })?;
4552 Ok(buffer)
4553 })
4554 }
4555
    /// Restores `paths` to their contents at `commit` (the equivalent of
    /// `git checkout <commit> -- <paths>`), tracking each path as
    /// `Reverted` while the operation is in flight.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        // Status line shown while the job runs.
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                // Local: run the checkout directly via the backend.
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote: forward the request over RPC.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4610
    /// Resets the repository to `commit` with the given mode
    /// (soft or mixed; those are the only `ResetMode` variants mapped here).
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            // Translate the local enum into its proto encoding.
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4644
    /// Fetches the details (sha, message, timestamp, author) of a single
    /// commit, locally or over RPC.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    // Rebuild `CommitDetails` from the proto response fields.
                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4672
    /// Loads the full diff (old/new text per file) introduced by `commit`,
    /// locally or over RPC.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    // Path decoding is fallible, so the per-file conversion
                    // collects into a Result and short-circuits on error.
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4708
    /// Convenience wrapper for [`Self::file_history_paginated`] that fetches
    /// the complete history of `path` (no skip, no limit).
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4715
    /// Fetches a page of the commit history touching `path`, skipping the
    /// first `skip` entries and returning at most `limit` (all when `None`),
    /// locally or over RPC.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            // usize -> u64 for the wire format.
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4757
    /// Returns the cached initial graph data for `(log_source, log_order)`,
    /// if a fetch was previously started (see `graph_data`). Does not start
    /// a fetch itself.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4765
4766 pub fn search_commits(
4767 &mut self,
4768 log_source: LogSource,
4769 search_args: SearchCommitArgs,
4770 request_tx: smol::channel::Sender<Oid>,
4771 cx: &mut Context<Self>,
4772 ) {
4773 let repository_state = self.repository_state.clone();
4774
4775 cx.background_spawn(async move {
4776 let repo_state = repository_state.await;
4777
4778 match repo_state {
4779 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4780 backend
4781 .search_commits(log_source, search_args, request_tx)
4782 .await
4783 .log_err();
4784 }
4785 Ok(RepositoryState::Remote(_)) => {}
4786 Err(_) => {}
4787 };
4788 })
4789 .detach();
4790 }
4791
    /// Returns the slice of cached graph commits covering `range` for the
    /// given source/order, starting a background fetch on first request.
    ///
    /// The fetch task is stored inside the cache entry itself, so clearing
    /// the entry (e.g. on `HeadChanged`) also cancels the fetch.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record fetch errors on the cache entry so callers can
                    // surface them via `GraphDataResponse::error`.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    // The task lives in the entry, so a missing
                                    // entry implies this task should be gone too.
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to what has loaded so far.
        // NOTE(review): start clamps to len-1 while end clamps to len, so an
        // out-of-range request still yields the final commit — presumably
        // intentional; confirm an empty `range` at/after len is acceptable.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4861
    /// Streams initial graph commit data from the local backend into the
    /// repository's `initial_graph_data` cache, batch by batch, emitting a
    /// `CountUpdated` graph event per batch.
    ///
    /// Returns the backend task's final result (errors stringified into a
    /// `SharedString` for the caller to store on the cache entry).
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // The backend walks the log on a background thread and sends batches
        // through `request_tx`; dropping the sender ends the loop below.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        // Append the batch, keeping the oid -> index map in
                        // sync with the commit vector.
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                // The entry owns the task driving this function, so a vacant
                // entry here means the cancellation invariant was violated.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4915
    /// Returns the cached state for `sha`'s full commit data, requesting a
    /// load when it hasn't been fetched yet.
    ///
    /// NOTE(review): when the handler is `Closed`, this call only opens it —
    /// the current `sha` is not enqueued until a subsequent call finds the
    /// handler `Open`; confirm callers invoke this repeatedly (e.g. per
    /// render) so the request eventually goes out.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Mark as loading only if the request was accepted; a full
                    // or closed channel leaves the entry absent for a retry.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                // A previous call is already spinning the handler up.
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4936
    /// Spins up the pair of tasks that service commit-data requests:
    /// a background reader that resolves OIDs to `GraphCommitData`, and a
    /// foreground task that stores results in `commit_data` and notifies.
    ///
    /// The background loop shuts itself down after 10 seconds without a
    /// request; when the result channel closes, the foreground task resets
    /// the handler to `Closed` so a later request reopens it.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // results: background reader -> foreground cache (bounded for backpressure);
        // requests: UI -> background reader (unbounded).
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    // Replacing a `Loading` placeholder is expected; replacing
                    // already-loaded data is not.
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Result channel closed (or entity dropped): mark the handler
            // closed so the next fetch reopens it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Fresh idle timer each iteration; requests reset the window.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            // Read failures are logged; the entry stays
                            // `Loading` on the foreground side.
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel makes the foreground task exit and
            // flip the handler back to `Closed`.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
5027
5028 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
5029 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
5030 }
5031
    /// Collects save tasks for every open buffer among `entries` that still
    /// exists on disk and has unsaved edits.
    ///
    /// Used before index-writing operations (see `stage_or_unstage_entries`)
    /// so git sees the buffers' current contents. Paths with no open buffer,
    /// no on-disk file, or no edits are skipped.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
5058
    /// Stages the given paths (the equivalent of `git add <paths>`).
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
5066
    /// Unstages the given paths (the equivalent of `git reset <paths>`).
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
5074
    /// Shared implementation behind `stage_entries`/`unstage_entries`.
    ///
    /// Saves dirty buffers first, then (keyed on the path set so identical
    /// writes coalesce) optimistically applies the stage/unstage to every
    /// affected uncommitted diff before performing the actual index write —
    /// locally via the backend, remotely via RPC. On failure the optimistic
    /// hunk state is rolled back via `clear_pending_hunks`.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush unsaved edits so the index write sees current content.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip all hunks in each affected
                            // uncommitted diff, remembering the per-diff
                            // operation count so success/failure can be
                            // reconciled below.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic updates: on success,
                            // record the operation count the write reflects;
                            // on failure, drop the pending hunk state.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5259
    /// Stages every path that is not already staged (or being staged).
    ///
    /// The path selection runs on a background thread over clones of the
    /// snapshot and pending-op map; pending ops take precedence over the
    /// snapshot's recorded staging state.
    pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let snapshot = self.snapshot.clone();
        let pending_ops = self.pending_ops.clone();
        let to_stage = cx.background_spawn(async move {
            snapshot
                .status()
                .filter_map(|entry| {
                    if let Some(ops) =
                        pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
                    {
                        // An in-flight or completed stage makes this path a no-op.
                        if ops.staging() || ops.staged() {
                            None
                        } else {
                            Some(entry.repo_path)
                        }
                    } else if entry.status.staging().is_fully_staged() {
                        None
                    } else {
                        Some(entry.repo_path)
                    }
                })
                .collect()
        });

        cx.spawn(async move |this, cx| {
            let to_stage = to_stage.await;
            this.update(cx, |this, cx| {
                this.stage_or_unstage_entries(true, to_stage, cx)
            })?
            .await
        })
    }
5292
5293 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5294 let snapshot = self.snapshot.clone();
5295 let pending_ops = self.pending_ops.clone();
5296 let to_unstage = cx.background_spawn(async move {
5297 snapshot
5298 .status()
5299 .filter_map(|entry| {
5300 if let Some(ops) =
5301 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5302 {
5303 if !ops.staging() && !ops.staged() {
5304 None
5305 } else {
5306 Some(entry.repo_path)
5307 }
5308 } else if entry.status.staging().is_fully_unstaged() {
5309 None
5310 } else {
5311 Some(entry.repo_path)
5312 }
5313 })
5314 .collect()
5315 });
5316
5317 cx.spawn(async move |this, cx| {
5318 let to_unstage = to_unstage.await;
5319 this.update(cx, |this, cx| {
5320 this.stage_or_unstage_entries(false, to_unstage, cx)
5321 })?
5322 .await
5323 })
5324 }
5325
5326 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5327 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5328
5329 self.stash_entries(to_stash, cx)
5330 }
5331
    /// Stashes only the given paths, locally via the backend or remotely via
    /// an RPC request to the host.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // `?` propagates a dropped-entity error from `update`; the `??`
            // unwraps both the job's oneshot receiver and the job's result.
            .await??;
            Ok(())
        })
    }
5368
    /// Pops the stash entry at `index` (or the latest entry when `None`),
    /// locally via the backend or remotely via an RPC request to the host.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // `?` propagates a dropped-entity error from `update`; the `??`
            // unwraps both the job's oneshot receiver and the job's result.
            .await??;
            Ok(())
        })
    }
5402
    /// Applies the stash entry at `index` (or the latest entry when `None`)
    /// without dropping it, locally via the backend or remotely via RPC.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // `?` propagates a dropped-entity error from `update`; the `??`
            // unwraps both the job's oneshot receiver and the job's result.
            .await??;
            Ok(())
        })
    }
5436
5437 pub fn stash_drop(
5438 &mut self,
5439 index: Option<usize>,
5440 cx: &mut Context<Self>,
5441 ) -> oneshot::Receiver<anyhow::Result<()>> {
5442 let id = self.id;
5443 let updates_tx = self
5444 .git_store()
5445 .and_then(|git_store| match &git_store.read(cx).state {
5446 GitStoreState::Local { downstream, .. } => downstream
5447 .as_ref()
5448 .map(|downstream| downstream.updates_tx.clone()),
5449 _ => None,
5450 });
5451 let this = cx.weak_entity();
5452 self.send_job(None, move |git_repo, mut cx| async move {
5453 match git_repo {
5454 RepositoryState::Local(LocalRepositoryState {
5455 backend,
5456 environment,
5457 ..
5458 }) => {
5459 // TODO would be nice to not have to do this manually
5460 let result = backend.stash_drop(index, environment).await;
5461 if result.is_ok()
5462 && let Ok(stash_entries) = backend.stash_entries().await
5463 {
5464 let snapshot = this.update(&mut cx, |this, cx| {
5465 this.snapshot.stash_entries = stash_entries;
5466 cx.emit(RepositoryEvent::StashEntriesChanged);
5467 this.snapshot.clone()
5468 })?;
5469 if let Some(updates_tx) = updates_tx {
5470 updates_tx
5471 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5472 .ok();
5473 }
5474 }
5475
5476 result
5477 }
5478 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5479 client
5480 .request(proto::StashDrop {
5481 project_id: project_id.0,
5482 repository_id: id.to_proto(),
5483 stash_index: index.map(|i| i as u64),
5484 })
5485 .await
5486 .context("sending stash pop request")?;
5487 Ok(())
5488 }
5489 }
5490 })
5491 }
5492
    /// Runs the given git hook (e.g. pre-commit), locally via the backend or
    /// remotely via an RPC request to the host.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            // The job description surfaces in the UI's running-operation status.
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5519
    /// Creates a commit with the given message and options.
    ///
    /// Queues the pre-commit hook first and awaits it inside the commit job,
    /// so the commit only proceeds if the hook succeeds. For remote
    /// repositories, the askpass delegate is registered under a fresh id for
    /// the duration of the request so the host can relay credential prompts.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook before the commit job; both run on the
        // same job queue, and the commit awaits the hook's result below.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Ensure the delegate is removed even if the request fails.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5575
    /// Fetches from a remote, returning the command's stdout/stderr.
    ///
    /// For remote repositories, registers the askpass delegate under a fresh
    /// id for the duration of the request so credential prompts can be relayed.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Ensure the delegate is removed even if the request fails.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5617
5618 pub fn push(
5619 &mut self,
5620 branch: SharedString,
5621 remote_branch: SharedString,
5622 remote: SharedString,
5623 options: Option<PushOptions>,
5624 askpass: AskPassDelegate,
5625 cx: &mut Context<Self>,
5626 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5627 let askpass_delegates = self.askpass_delegates.clone();
5628 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5629 let id = self.id;
5630
5631 let args = options
5632 .map(|option| match option {
5633 PushOptions::SetUpstream => " --set-upstream",
5634 PushOptions::Force => " --force-with-lease",
5635 })
5636 .unwrap_or("");
5637
5638 let updates_tx = self
5639 .git_store()
5640 .and_then(|git_store| match &git_store.read(cx).state {
5641 GitStoreState::Local { downstream, .. } => downstream
5642 .as_ref()
5643 .map(|downstream| downstream.updates_tx.clone()),
5644 _ => None,
5645 });
5646
5647 let this = cx.weak_entity();
5648 self.send_job(
5649 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5650 move |git_repo, mut cx| async move {
5651 match git_repo {
5652 RepositoryState::Local(LocalRepositoryState {
5653 backend,
5654 environment,
5655 ..
5656 }) => {
5657 let result = backend
5658 .push(
5659 branch.to_string(),
5660 remote_branch.to_string(),
5661 remote.to_string(),
5662 options,
5663 askpass,
5664 environment.clone(),
5665 cx.clone(),
5666 )
5667 .await;
5668 // TODO would be nice to not have to do this manually
5669 if result.is_ok() {
5670 let branches = backend.branches().await?;
5671 let branch = branches.into_iter().find(|branch| branch.is_head);
5672 log::info!("head branch after scan is {branch:?}");
5673 let snapshot = this.update(&mut cx, |this, cx| {
5674 this.snapshot.branch = branch;
5675 cx.emit(RepositoryEvent::HeadChanged);
5676 this.snapshot.clone()
5677 })?;
5678 if let Some(updates_tx) = updates_tx {
5679 updates_tx
5680 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5681 .ok();
5682 }
5683 }
5684 result
5685 }
5686 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5687 askpass_delegates.lock().insert(askpass_id, askpass);
5688 let _defer = util::defer(|| {
5689 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5690 debug_assert!(askpass_delegate.is_some());
5691 });
5692 let response = client
5693 .request(proto::Push {
5694 project_id: project_id.0,
5695 repository_id: id.to_proto(),
5696 askpass_id,
5697 branch_name: branch.to_string(),
5698 remote_branch_name: remote_branch.to_string(),
5699 remote_name: remote.to_string(),
5700 options: options.map(|options| match options {
5701 PushOptions::Force => proto::push::PushOptions::Force,
5702 PushOptions::SetUpstream => {
5703 proto::push::PushOptions::SetUpstream
5704 }
5705 }
5706 as i32),
5707 })
5708 .await?;
5709
5710 Ok(RemoteCommandOutput {
5711 stdout: response.stdout,
5712 stderr: response.stderr,
5713 })
5714 }
5715 }
5716 },
5717 )
5718 }
5719
    /// Pulls from `remote` (optionally a specific `branch`, optionally with
    /// `--rebase`), returning the command's stdout/stderr.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build the human-readable job description shown while the pull runs.
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Ensure the delegate is removed even if the request fails.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5784
    /// Writes `content` into the index entry for `path` (or removes the entry
    /// when `content` is `None`).
    ///
    /// Jobs are keyed by path so concurrent writes to the same index entry
    /// are serialized/coalesced. After a successful write, if a
    /// `hunk_staging_operation_count` was provided, records it on the
    /// corresponding buffer's diff state so later diff recalculations can
    /// tell which hunk-staging operations this write already reflects.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit from the working copy;
                        // missing or unreadable files are treated as non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Map the repo path back to a project path to locate the
                    // open buffer (if any) whose diff state should be updated.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5863
5864 pub fn create_remote(
5865 &mut self,
5866 remote_name: String,
5867 remote_url: String,
5868 ) -> oneshot::Receiver<Result<()>> {
5869 let id = self.id;
5870 self.send_job(
5871 Some(format!("git remote add {remote_name} {remote_url}").into()),
5872 move |repo, _cx| async move {
5873 match repo {
5874 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5875 backend.create_remote(remote_name, remote_url).await
5876 }
5877 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5878 client
5879 .request(proto::GitCreateRemote {
5880 project_id: project_id.0,
5881 repository_id: id.to_proto(),
5882 remote_name,
5883 remote_url,
5884 })
5885 .await?;
5886
5887 Ok(())
5888 }
5889 }
5890 },
5891 )
5892 }
5893
5894 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5895 let id = self.id;
5896 self.send_job(
5897 Some(format!("git remove remote {remote_name}").into()),
5898 move |repo, _cx| async move {
5899 match repo {
5900 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5901 backend.remove_remote(remote_name).await
5902 }
5903 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5904 client
5905 .request(proto::GitRemoveRemote {
5906 project_id: project_id.0,
5907 repository_id: id.to_proto(),
5908 remote_name,
5909 })
5910 .await?;
5911
5912 Ok(())
5913 }
5914 }
5915 },
5916 )
5917 }
5918
    /// Resolves the remote(s) relevant to `branch_name`.
    ///
    /// Locally: prefers the branch's configured push remote (when `is_push`)
    /// or upstream remote; falls back to listing all remotes when the branch
    /// has none configured (or no branch was given).
    pub fn get_remotes(
        &mut self,
        branch_name: Option<String>,
        is_push: bool,
    ) -> oneshot::Receiver<Result<Vec<Remote>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let remote = if let Some(branch_name) = branch_name {
                        if is_push {
                            backend.get_push_remote(branch_name).await?
                        } else {
                            backend.get_branch_remote(branch_name).await?
                        }
                    } else {
                        None
                    };

                    match remote {
                        Some(remote) => Ok(vec![remote]),
                        None => backend.get_all_remotes().await,
                    }
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // The host applies the same resolution logic server-side.
                    let response = client
                        .request(proto::GetRemotes {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                            is_push,
                        })
                        .await?;

                    let remotes = response
                        .remotes
                        .into_iter()
                        .map(|remotes| Remote {
                            name: remotes.name.into(),
                        })
                        .collect();

                    Ok(remotes)
                }
            }
        })
    }
5966
5967 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5968 let id = self.id;
5969 self.send_job(None, move |repo, _| async move {
5970 match repo {
5971 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5972 backend.branches().await
5973 }
5974 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5975 let response = client
5976 .request(proto::GitGetBranches {
5977 project_id: project_id.0,
5978 repository_id: id.to_proto(),
5979 })
5980 .await?;
5981
5982 let branches = response
5983 .branches
5984 .into_iter()
5985 .map(|branch| proto_to_branch(&branch))
5986 .collect();
5987
5988 Ok(branches)
5989 }
5990 }
5991 })
5992 }
5993
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree's working directory differs from the repository's
        // original checkout directory; equality means we're the main checkout.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
6005
6006 pub fn path_for_new_linked_worktree(
6007 &self,
6008 branch_name: &str,
6009 worktree_directory_setting: &str,
6010 ) -> Result<PathBuf> {
6011 let original_repo = self.original_repo_abs_path.clone();
6012 let project_name = original_repo
6013 .file_name()
6014 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
6015 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
6016 Ok(directory.join(branch_name).join(project_name))
6017 }
6018
6019 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
6020 let id = self.id;
6021 self.send_job(None, move |repo, _| async move {
6022 match repo {
6023 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6024 backend.worktrees().await
6025 }
6026 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6027 let response = client
6028 .request(proto::GitGetWorktrees {
6029 project_id: project_id.0,
6030 repository_id: id.to_proto(),
6031 })
6032 .await?;
6033
6034 let worktrees = response
6035 .worktrees
6036 .into_iter()
6037 .map(|worktree| proto_to_worktree(&worktree))
6038 .collect();
6039
6040 Ok(worktrees)
6041 }
6042 }
6043 })
6044 }
6045
    /// Creates a new git worktree at `path`, checked out according to
    /// `target` (an existing branch, a new branch, or detached at a commit).
    pub fn create_worktree(
        &mut self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let job_description = match target.branch_name() {
            Some(branch_name) => format!("git worktree add: {branch_name}"),
            None => "git worktree add (detached)".to_string(),
        };
        self.send_job(Some(job_description.into()), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_worktree(target, path).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Flatten the target enum into the proto's
                    // (name, commit, use_existing_branch) triple.
                    let (name, commit, use_existing_branch) = match target {
                        CreateWorktreeTarget::ExistingBranch { branch_name } => {
                            (Some(branch_name), None, true)
                        }
                        CreateWorktreeTarget::NewBranch {
                            branch_name,
                            base_sha,
                        } => (Some(branch_name), base_sha, false),
                        CreateWorktreeTarget::Detached { base_sha } => (None, base_sha, false),
                    };

                    client
                        .request(proto::GitCreateWorktree {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            name: name.unwrap_or_default(),
                            directory: path.to_string_lossy().to_string(),
                            commit,
                            use_existing_branch,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6089
6090 pub fn create_worktree_detached(
6091 &mut self,
6092 path: PathBuf,
6093 commit: String,
6094 ) -> oneshot::Receiver<Result<()>> {
6095 self.create_worktree(
6096 CreateWorktreeTarget::Detached {
6097 base_sha: Some(commit),
6098 },
6099 path,
6100 )
6101 }
6102
    /// Checks out `branch_name` inside the worktree at `worktree_path`,
    /// creating the branch first when `create` is true.
    ///
    /// Local repositories only; for remote repositories this logs a warning
    /// and reports success without doing anything.
    pub fn checkout_branch_in_worktree(
        &mut self,
        branch_name: String,
        worktree_path: PathBuf,
        create: bool,
    ) -> oneshot::Receiver<Result<()>> {
        let description = if create {
            format!("git checkout -b {branch_name}")
        } else {
            format!("git checkout {branch_name}")
        };
        self.send_job(Some(description.into()), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend
                        .checkout_branch_in_worktree(branch_name, worktree_path, create)
                        .await
                }
                RepositoryState::Remote(_) => {
                    log::warn!("checkout_branch_in_worktree not supported for remote repositories");
                    Ok(())
                }
            }
        })
    }
6128
6129 pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
6130 let id = self.id;
6131 self.send_job(None, move |repo, _cx| async move {
6132 match repo {
6133 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6134 Ok(backend.head_sha().await)
6135 }
6136 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6137 let response = client
6138 .request(proto::GitGetHeadSha {
6139 project_id: project_id.0,
6140 repository_id: id.to_proto(),
6141 })
6142 .await?;
6143
6144 Ok(response.sha)
6145 }
6146 }
6147 })
6148 }
6149
6150 pub fn update_ref(
6151 &mut self,
6152 ref_name: String,
6153 commit: String,
6154 ) -> oneshot::Receiver<Result<()>> {
6155 self.send_job(None, move |repo, _cx| async move {
6156 match repo {
6157 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6158 backend.update_ref(ref_name, commit).await
6159 }
6160 RepositoryState::Remote(_) => {
6161 anyhow::bail!("update_ref is not supported for remote repositories")
6162 }
6163 }
6164 })
6165 }
6166
6167 pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
6168 self.send_job(None, move |repo, _cx| async move {
6169 match repo {
6170 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6171 backend.delete_ref(ref_name).await
6172 }
6173 RepositoryState::Remote(_) => {
6174 anyhow::bail!("delete_ref is not supported for remote repositories")
6175 }
6176 }
6177 })
6178 }
6179
6180 pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
6181 self.send_job(None, move |repo, _cx| async move {
6182 match repo {
6183 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6184 backend.repair_worktrees().await
6185 }
6186 RepositoryState::Remote(_) => {
6187 anyhow::bail!("repair_worktrees is not supported for remote repositories")
6188 }
6189 }
6190 })
6191 }
6192
6193 pub fn create_archive_checkpoint(&mut self) -> oneshot::Receiver<Result<(String, String)>> {
6194 self.send_job(None, move |repo, _cx| async move {
6195 match repo {
6196 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6197 backend.create_archive_checkpoint().await
6198 }
6199 RepositoryState::Remote(_) => {
6200 anyhow::bail!(
6201 "create_archive_checkpoint is not supported for remote repositories"
6202 )
6203 }
6204 }
6205 })
6206 }
6207
6208 pub fn restore_archive_checkpoint(
6209 &mut self,
6210 staged_sha: String,
6211 unstaged_sha: String,
6212 ) -> oneshot::Receiver<Result<()>> {
6213 self.send_job(None, move |repo, _cx| async move {
6214 match repo {
6215 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6216 backend
6217 .restore_archive_checkpoint(staged_sha, unstaged_sha)
6218 .await
6219 }
6220 RepositoryState::Remote(_) => {
6221 anyhow::bail!(
6222 "restore_archive_checkpoint is not supported for remote repositories"
6223 )
6224 }
6225 }
6226 })
6227 }
6228
    /// Removes the worktree at `path`; `force` removes it even when dirty.
    pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree remove: {}", path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.remove_worktree(path, force).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRemoveWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_string_lossy().to_string(),
                                force,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6254
    /// Moves the worktree at `old_path` to `new_path`.
    pub fn rename_worktree(
        &mut self,
        old_path: PathBuf,
        new_path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree move: {}", old_path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_worktree(old_path, new_path).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                old_path: old_path.to_string_lossy().to_string(),
                                new_path: new_path.to_string_lossy().to_string(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6284
6285 pub fn default_branch(
6286 &mut self,
6287 include_remote_name: bool,
6288 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
6289 let id = self.id;
6290 self.send_job(None, move |repo, _| async move {
6291 match repo {
6292 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6293 backend.default_branch(include_remote_name).await
6294 }
6295 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6296 let response = client
6297 .request(proto::GetDefaultBranch {
6298 project_id: project_id.0,
6299 repository_id: id.to_proto(),
6300 })
6301 .await?;
6302
6303 anyhow::Ok(response.branch.map(SharedString::from))
6304 }
6305 }
6306 })
6307 }
6308
    /// Computes a tree-level diff (per-path add/modify/delete statuses) for
    /// the given `diff_type`.
    ///
    /// For remote repositories, reconstructs `TreeDiffStatus` values from the
    /// proto response; entries with a missing/unparsable oid or path are
    /// logged and skipped rather than failing the whole diff.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            // Modified/Deleted statuses carry the pre-change
                            // blob oid; Added entries have no prior oid.
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6368
    /// Produces a textual (patch-format) diff for the given `diff_type`.
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Translate the diff type into the proto enum; only
                    // MergeBase carries an explicit base ref.
                    let (proto_diff_type, merge_base_ref) = match &diff_type {
                        DiffType::HeadToIndex => {
                            (proto::git_diff::DiffType::HeadToIndex.into(), None)
                        }
                        DiffType::HeadToWorktree => {
                            (proto::git_diff::DiffType::HeadToWorktree.into(), None)
                        }
                        DiffType::MergeBase { base_ref } => (
                            proto::git_diff::DiffType::MergeBase.into(),
                            Some(base_ref.to_string()),
                        ),
                    };
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            diff_type: proto_diff_type,
                            merge_base_ref,
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
6403
    /// Creates (and switches to) a new branch, optionally based on `base_branch`.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `base_branch` is not forwarded here, so a
                    // remote create always branches from the host's current
                    // HEAD — confirm whether the proto supports a base ref.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6434
6435 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6436 let id = self.id;
6437 self.send_job(
6438 Some(format!("git switch {branch_name}").into()),
6439 move |repo, _cx| async move {
6440 match repo {
6441 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6442 backend.change_branch(branch_name).await
6443 }
6444 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6445 client
6446 .request(proto::GitChangeBranch {
6447 project_id: project_id.0,
6448 repository_id: id.to_proto(),
6449 branch_name,
6450 })
6451 .await?;
6452
6453 Ok(())
6454 }
6455 }
6456 },
6457 )
6458 }
6459
    /// Deletes a local branch (`git branch -d`) or a remote-tracking branch
    /// (`git branch -dr`), depending on `is_remote`.
    pub fn delete_branch(
        &mut self,
        is_remote: bool,
        branch_name: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(
                format!(
                    "git branch {} {}",
                    if is_remote { "-dr" } else { "-d" },
                    branch_name
                )
                .into(),
            ),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(state) => {
                        state.backend.delete_branch(is_remote, branch_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitDeleteBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                is_remote,
                                branch_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6496
    /// Renames `branch` to `new_name` (`git branch -m`).
    pub fn rename_branch(
        &mut self,
        branch: String,
        new_name: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git branch -m {branch} {new_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_branch(branch, new_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                branch,
                                new_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6526
    /// Asks the backend (or the remote host) which branches the current
    /// commit has been pushed to, returning their names.
    pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.check_for_pushed_commit().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::CheckForPushedCommits {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response.pushed_to.into_iter().map(Into::into).collect();

                    Ok(branches)
                }
            }
        })
    }
6549
    /// Captures a checkpoint of the repository's current state, identified by
    /// a commit SHA, which can later be restored, compared, or diffed.
    pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.checkpoint().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitCreateCheckpoint {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    // The SHA travels over the wire as raw bytes.
                    Ok(GitRepositoryCheckpoint {
                        commit_sha: Oid::from_bytes(&response.commit_sha)?,
                    })
                }
            }
        })
    }
6572
    /// Restores the repository to a previously captured `checkpoint`.
    pub fn restore_checkpoint(
        &mut self,
        checkpoint: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.restore_checkpoint(checkpoint).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitRestoreCheckpoint {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
                        })
                        .await?;
                    Ok(())
                }
            }
        })
    }
6596
    /// Applies an `UpdateRepository` message from the upstream (host) project
    /// to this downstream replica's snapshot, emitting a `RepositoryEvent`
    /// for each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch and HEAD: emit HeadChanged only if either actually differs.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to deserialize are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // File statuses arrive as a delta: removed paths plus updated entries.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // The scan id only becomes authoritative once the host has sent the
        // final chunk of a (possibly multi-message) update.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6680
    /// Compares two checkpoints, resolving to `true` when they are equal.
    pub fn compare_checkpoints(
        &mut self,
        left: GitRepositoryCheckpoint,
        right: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<bool>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.compare_checkpoints(left, right).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitCompareCheckpoints {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            left_commit_sha: left.commit_sha.as_bytes().to_vec(),
                            right_commit_sha: right.commit_sha.as_bytes().to_vec(),
                        })
                        .await?;
                    Ok(response.equal)
                }
            }
        })
    }
6706
    /// Produces a textual diff between two checkpoints
    /// (from `base_checkpoint` to `target_checkpoint`).
    pub fn diff_checkpoints(
        &mut self,
        base_checkpoint: GitRepositoryCheckpoint,
        target_checkpoint: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend
                        .diff_checkpoints(base_checkpoint, target_checkpoint)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitDiffCheckpoints {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
                            target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
                        })
                        .await?;
                    Ok(response.diff)
                }
            }
        })
    }
6734
    /// Drops all pending ops that are no longer running, keeping only the
    /// in-flight ones, and emits `PendingOpsChanged` if anything was removed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree keeping only entries with at least one running op.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *previous* pending-ops tree,
            // not the filtered `updated` one assigned below — confirm that
            // subscribers expect the pre-clear state here.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6760
    /// Schedules a full git status scan (local repositories only), forwarding
    /// the resulting snapshot downstream via `updates_tx` if provided.
    ///
    /// Uses a keyed job so redundant queued scans collapse into one.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been dropped in the meantime.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    // Best-effort: the downstream channel may already be closed.
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6792
    /// Spawns the background loop that executes git jobs for a local
    /// repository, returning the channel used to enqueue jobs.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything already queued so the keyed deduplication
                // below can see the whole backlog.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job when a newer job with the same key is
                    // still queued; only the last instance actually runs.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6838
    /// Spawns the background loop that executes git jobs for a remote
    /// repository, returning the channel used to enqueue jobs.
    ///
    /// The queue/dedup loop mirrors `spawn_local_git_worker`; only the state
    /// construction differs (no backend initialization is needed).
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain the channel so keyed deduplication sees the backlog.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Only the last queued instance of a keyed job runs.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6874
    /// Loads the staged (index) text for `repo_path`, or `None` when the path
    /// has no entry in the index.
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        // Flatten the job-channel receiver into the returned task.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6899
    /// Loads both the HEAD (committed) and index (staged) text for the
    /// buffer's path, collapsing them into `SetBoth` when they are identical
    /// so only one diff base needs to be stored.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The host performs the same index-vs-HEAD collapsing and
                    // signals the outcome through `mode`.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6945
    /// Loads the content of the git blob identified by `oid`.
    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
        let repository_id = self.snapshot.id;
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_blob_content(oid).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetBlobContent {
                            project_id: project_id.to_proto(),
                            repository_id: repository_id.0,
                            oid: oid.to_string(),
                        })
                        .await?;
                    Ok(response.content)
                }
            }
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6967
    /// Schedules an incremental status refresh for `paths` (local
    /// repositories only), batching them with any previously queued paths,
    /// and forwards the updated snapshot downstream via `updates_tx`.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        // Keyed job: if another refresh is already queued it absorbs this
        // batch (see the worker loop's keyed-job deduplication).
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of all accumulated paths in one shot.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        // Without a HEAD commit there is nothing to diff
                        // against, so substitute an empty diff-stat result.
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Insert an edit for every path whose status or diff
                        // stat differs from the previous snapshot.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths git no longer reports are removed from the
                        // tree (if they were previously present).
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        // Best-effort: the downstream channel may be closed.
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
7080
    /// Returns info about a currently running git command (and when it
    /// started), if any. With multiple active jobs, which one is returned
    /// is arbitrary.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }
7085
    /// Enqueues a no-op job; the returned receiver resolves once every job
    /// submitted before it has run, acting as a synchronization barrier.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
7089
    /// Runs `f` while tracking a pending op (tagged `git_status`) for each of
    /// `paths`, so per-path progress can be surfaced; marks the ops Finished,
    /// Skipped (on cancellation), or Error when the job completes.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A `Canceled` error means the op was superseded, not failed.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Record the final status on every op we registered above.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7129
    /// Registers a new `Running` pending op (tagged `git_status`) for each
    /// path, returning the `(op id, path)` pairs so callers can update them
    /// when the job resolves.
    fn new_pending_ops_for_paths(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
    ) -> Vec<(PendingOpId, RepoPath)> {
        let mut edits = Vec::with_capacity(paths.len());
        let mut ids = Vec::with_capacity(paths.len());
        for path in paths {
            let mut ops = self
                .pending_ops
                .get(&PathKey(path.as_ref().clone()), ())
                .cloned()
                .unwrap_or_else(|| PendingOps::new(&path));
            // Op ids increase monotonically per path.
            let id = ops.max_id() + 1;
            ops.ops.push(PendingOp {
                id,
                git_status,
                job_status: pending_op::JobStatus::Running,
            });
            edits.push(sum_tree::Edit::Insert(ops));
            ids.push((id, path));
        }
        self.pending_ops.edit(edits, ());
        ids
    }
7155 pub fn default_remote_url(&self) -> Option<String> {
7156 self.remote_upstream_url
7157 .clone()
7158 .or(self.remote_origin_url.clone())
7159 }
7160}
7161
7162/// If `path` is a git linked worktree checkout, resolves it to the main
7163/// repository's working directory path. Returns `None` if `path` is a normal
7164/// repository, not a git repo, or if resolution fails.
7165///
7166/// Resolution works by:
7167/// 1. Reading the `.git` file to get the `gitdir:` pointer
7168/// 2. Following that to the worktree-specific git directory
7169/// 3. Reading the `commondir` file to find the shared `.git` directory
7170/// 4. Deriving the main repo's working directory from the common dir
7171pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7172 let dot_git = path.join(".git");
7173 let metadata = fs.metadata(&dot_git).await.ok()??;
7174 if metadata.is_dir {
7175 return None; // Normal repo, not a linked worktree
7176 }
7177 // It's a .git file — parse the gitdir: pointer
7178 let content = fs.load(&dot_git).await.ok()?;
7179 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7180 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7181 // Read commondir to find the main .git directory
7182 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7183 let common_dir = fs
7184 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7185 .await
7186 .ok()?;
7187 git::repository::original_repo_path_from_common_dir(&common_dir)
7188}
7189
/// Validates that the resolved worktree directory is acceptable:
/// - The setting must not be an absolute path.
/// - The resolved path must be either a subdirectory of the working
///   directory or a subdirectory of its parent (i.e., a sibling).
///
/// Returns `Ok(resolved_path)` or an error with a user-facing message.
pub fn worktrees_directory_for_repo(
    original_repo_abs_path: &Path,
    worktree_directory_setting: &str,
) -> Result<PathBuf> {
    // Check the original setting before trimming, since a path like "///"
    // is absolute but becomes "" after stripping trailing separators.
    // Also check for leading `/` or `\` explicitly, because on Windows
    // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
    // would slip through even though it's clearly not a relative path.
    if Path::new(worktree_directory_setting).is_absolute()
        || worktree_directory_setting.starts_with('/')
        || worktree_directory_setting.starts_with('\\')
    {
        anyhow::bail!(
            "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
        );
    }

    if worktree_directory_setting.is_empty() {
        anyhow::bail!("git.worktree_directory must not be empty");
    }

    let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
    if trimmed == ".." {
        anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
    }

    let joined = original_repo_abs_path.join(trimmed);
    let resolved = util::normalize_path(&joined);
    // When the setting escapes the repo (e.g. "../worktrees"), append the
    // repo's directory name so sibling worktree dirs are namespaced per repo.
    let resolved = if resolved.starts_with(original_repo_abs_path) {
        resolved
    } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
        resolved.join(repo_dir_name)
    } else {
        resolved
    };

    let parent = original_repo_abs_path
        .parent()
        .unwrap_or(original_repo_abs_path);

    // Final containment check: the resolved directory must live under the
    // repo itself or under its immediate parent.
    if !resolved.starts_with(parent) {
        anyhow::bail!(
            "git.worktree_directory resolved to {resolved:?}, which is outside \
            the project root and its parent directory. It must resolve to a \
            subdirectory of {original_repo_abs_path:?} or a sibling of it."
        );
    }

    Ok(resolved)
}
7247
7248/// Returns a short name for a linked worktree suitable for UI display
7249///
7250/// Uses the main worktree path to come up with a short name that disambiguates
7251/// the linked worktree from the main worktree.
7252pub fn linked_worktree_short_name(
7253 main_worktree_path: &Path,
7254 linked_worktree_path: &Path,
7255) -> Option<SharedString> {
7256 if main_worktree_path == linked_worktree_path {
7257 return None;
7258 }
7259
7260 let project_name = main_worktree_path.file_name()?.to_str()?;
7261 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7262 let name = if directory_name != project_name {
7263 directory_name.to_string()
7264 } else {
7265 linked_worktree_path
7266 .parent()?
7267 .file_name()?
7268 .to_str()?
7269 .to_string()
7270 };
7271 Some(name.into())
7272}
7273
/// Builds a permalink for a file inside a vendored crates.io registry source
/// checkout by reading the crate's `.cargo_vcs_info.json` (exact commit SHA
/// and path prefix within the upstream repo) and `Cargo.toml` (the upstream
/// repository URL).
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal mirrors of the JSON/TOML shapes cargo publishes.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to find the crate root (the directory that
    // contains `.cargo_vcs_info.json`).
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file path under the crate's location within the upstream repo.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7324
/// Serializes an optional blame result into the wire format.
///
/// Note the field renames on the wire: `committer_name`/`committer_email`
/// become `committer`/`committer_mail` in the proto message.
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author,
            author_mail: entry.author_mail,
            author_time: entry.author_time,
            author_tz: entry.author_tz,
            committer: entry.committer_name,
            committer_mail: entry.committer_email,
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz,
            summary: entry.summary,
            previous: entry.previous,
            filename: entry.filename,
        })
        .collect::<Vec<_>>();

    // Commit messages are shipped alongside the entries, keyed by oid.
    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
    }
}
7367
/// Deserializes a blame response from the wire format; inverse of
/// `serialize_blame_buffer_response`.
///
/// Entries and messages with malformed oids are silently dropped.
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame { entries, messages })
}
7403
/// Serializes a branch (with optional upstream-tracking and most-recent-commit
/// summaries) into the wire format.
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        ref_name: branch.ref_name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            // `tracking` is None on the wire when the upstream ref is gone.
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string(),
            }),
    }
}
7433
7434fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7435 proto::Worktree {
7436 path: worktree.path.to_string_lossy().to_string(),
7437 ref_name: worktree
7438 .ref_name
7439 .as_ref()
7440 .map(|s| s.to_string())
7441 .unwrap_or_default(),
7442 sha: worktree.sha.to_string(),
7443 is_main: worktree.is_main,
7444 is_bare: worktree.is_bare,
7445 }
7446}
7447
7448fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7449 git::repository::Worktree {
7450 path: PathBuf::from(proto.path.clone()),
7451 ref_name: if proto.ref_name.is_empty() {
7452 None
7453 } else {
7454 Some(SharedString::from(&proto.ref_name))
7455 },
7456 sha: proto.sha.clone().into(),
7457 is_main: proto.is_main,
7458 is_bare: proto.is_bare,
7459 }
7460}
7461
/// Deserializes a branch from the wire format; inverse of `branch_to_proto`.
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    // A missing tracking status means the upstream ref is gone.
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                // NOTE(review): the proto carries no parent information, so
                // this is assumed — confirm consumers tolerate `has_parent`
                // being optimistic for root commits.
                has_parent: true,
            }
        }),
    }
}
7493
7494fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7495 proto::GitCommitDetails {
7496 sha: commit.sha.to_string(),
7497 message: commit.message.to_string(),
7498 commit_timestamp: commit.commit_timestamp,
7499 author_email: commit.author_email.to_string(),
7500 author_name: commit.author_name.to_string(),
7501 }
7502}
7503
7504fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7505 CommitDetails {
7506 sha: proto.sha.clone().into(),
7507 message: proto.message.clone().into(),
7508 commit_timestamp: proto.commit_timestamp,
7509 author_email: proto.author_email.clone().into(),
7510 author_name: proto.author_name.clone().into(),
7511 }
7512}
7513
/// Recomputes this repository's snapshot, running the git commands on the
/// background thread while applying results on the foreground thread.
///
/// The update happens in two phases so the UI can react sooner: branch,
/// head commit, remote URLs, and linked worktrees are written to the
/// snapshot (and their change events emitted) first; file statuses, diff
/// stats, stash entries, and merge/conflict state are computed and applied
/// immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        // A full recompute supersedes any queued per-path status refreshes.
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD's commit details, if a HEAD exists. A failure from `show`
    // is logged (`log_err`) and treated as "no head commit" rather than
    // aborting the whole recompute.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Phase 1 queries, run concurrently off the foreground thread.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // "Linked" worktrees exclude the repository's own working directory.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    // Fetch both conventional remote URLs; plain `join` (not `try_join`)
    // because the remotes' absence is handled by the callers, not treated as
    // a failure here.
    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // Phase 1 apply: publish branch/head/remote/worktree state and emit the
    // corresponding events before the slower file-state queries run.
    let snapshot = this.update(cx, |this, cx| {
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            // Each phase bumps the scan id, so one recompute advances it by 2.
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Phase 2 queries: statuses for the "." repo path, per-file diff stats
    // (skipped with an empty result when there is no HEAD to diff against),
    // and stash entries.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Index diff stats by path for O(1) lookup while building status entries,
    // and collect conflicted paths for the merge-state refresh below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Refresh merge/conflict details from the newly observed conflicted paths.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Phase 2 apply: publish file state and emit status/stash change events.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7683
7684fn status_from_proto(
7685 simple_status: i32,
7686 status: Option<proto::GitFileStatus>,
7687) -> anyhow::Result<FileStatus> {
7688 use proto::git_file_status::Variant;
7689
7690 let Some(variant) = status.and_then(|status| status.variant) else {
7691 let code = proto::GitStatus::from_i32(simple_status)
7692 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7693 let result = match code {
7694 proto::GitStatus::Added => TrackedStatus {
7695 worktree_status: StatusCode::Added,
7696 index_status: StatusCode::Unmodified,
7697 }
7698 .into(),
7699 proto::GitStatus::Modified => TrackedStatus {
7700 worktree_status: StatusCode::Modified,
7701 index_status: StatusCode::Unmodified,
7702 }
7703 .into(),
7704 proto::GitStatus::Conflict => UnmergedStatus {
7705 first_head: UnmergedStatusCode::Updated,
7706 second_head: UnmergedStatusCode::Updated,
7707 }
7708 .into(),
7709 proto::GitStatus::Deleted => TrackedStatus {
7710 worktree_status: StatusCode::Deleted,
7711 index_status: StatusCode::Unmodified,
7712 }
7713 .into(),
7714 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7715 };
7716 return Ok(result);
7717 };
7718
7719 let result = match variant {
7720 Variant::Untracked(_) => FileStatus::Untracked,
7721 Variant::Ignored(_) => FileStatus::Ignored,
7722 Variant::Unmerged(unmerged) => {
7723 let [first_head, second_head] =
7724 [unmerged.first_head, unmerged.second_head].map(|head| {
7725 let code = proto::GitStatus::from_i32(head)
7726 .with_context(|| format!("Invalid git status code: {head}"))?;
7727 let result = match code {
7728 proto::GitStatus::Added => UnmergedStatusCode::Added,
7729 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7730 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7731 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7732 };
7733 Ok(result)
7734 });
7735 let [first_head, second_head] = [first_head?, second_head?];
7736 UnmergedStatus {
7737 first_head,
7738 second_head,
7739 }
7740 .into()
7741 }
7742 Variant::Tracked(tracked) => {
7743 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7744 .map(|status| {
7745 let code = proto::GitStatus::from_i32(status)
7746 .with_context(|| format!("Invalid git status code: {status}"))?;
7747 let result = match code {
7748 proto::GitStatus::Modified => StatusCode::Modified,
7749 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7750 proto::GitStatus::Added => StatusCode::Added,
7751 proto::GitStatus::Deleted => StatusCode::Deleted,
7752 proto::GitStatus::Renamed => StatusCode::Renamed,
7753 proto::GitStatus::Copied => StatusCode::Copied,
7754 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7755 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7756 };
7757 Ok(result)
7758 });
7759 let [index_status, worktree_status] = [index_status?, worktree_status?];
7760 TrackedStatus {
7761 index_status,
7762 worktree_status,
7763 }
7764 .into()
7765 }
7766 };
7767 Ok(result)
7768}
7769
7770fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7771 use proto::git_file_status::{Tracked, Unmerged, Variant};
7772
7773 let variant = match status {
7774 FileStatus::Untracked => Variant::Untracked(Default::default()),
7775 FileStatus::Ignored => Variant::Ignored(Default::default()),
7776 FileStatus::Unmerged(UnmergedStatus {
7777 first_head,
7778 second_head,
7779 }) => Variant::Unmerged(Unmerged {
7780 first_head: unmerged_status_to_proto(first_head),
7781 second_head: unmerged_status_to_proto(second_head),
7782 }),
7783 FileStatus::Tracked(TrackedStatus {
7784 index_status,
7785 worktree_status,
7786 }) => Variant::Tracked(Tracked {
7787 index_status: tracked_status_to_proto(index_status),
7788 worktree_status: tracked_status_to_proto(worktree_status),
7789 }),
7790 };
7791 proto::GitFileStatus {
7792 variant: Some(variant),
7793 }
7794}
7795
7796fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7797 match code {
7798 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7799 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7800 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7801 }
7802}
7803
7804fn tracked_status_to_proto(code: StatusCode) -> i32 {
7805 match code {
7806 StatusCode::Added => proto::GitStatus::Added as _,
7807 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7808 StatusCode::Modified => proto::GitStatus::Modified as _,
7809 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7810 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7811 StatusCode::Copied => proto::GitStatus::Copied as _,
7812 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7813 }
7814}