1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for all git state in a project: the set of repositories,
/// per-buffer diff state, and (when collaborating) diffs shared with peers.
pub struct GitStore {
    // Local vs. remote (upstream-backed) mode, plus any downstream share.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Worktrees associated with each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    active_repo_id: Option<RepositoryId>,
    // In-flight diff loads, deduplicated per (buffer, diff kind) so concurrent
    // callers share one task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diffs that have been shared with each downstream peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Diffs for a single buffer that have been shared with a downstream peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: weak handles to the buffer's diffs and conflict set,
/// the cached base texts they were computed from, and bookkeeping for
/// in-progress recalculation.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against arbitrary commits, keyed by OID (`None` = empty base).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders notified when the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed when diffs are recalculated.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed, and their new
/// contents (`None` meaning the file is absent from that base).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    SetIndex(Option<String>),
    SetHead(Option<String>),
    /// Index and HEAD changed to different texts.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same text.
    SetBoth(Option<String>),
}
155
/// The kind of diff tracked for a buffer; used to key in-flight diff loads.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Working copy vs. index.
    Unstaged,
    /// Working copy vs. HEAD.
    Uncommitted,
    /// Working copy vs. a specific commit (`None` = empty base).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store owns local repositories or mirrors a remote project's.
enum GitStoreState {
    Local {
        next_repository_id: Arc<AtomicU64>,
        // Present while this local project is shared with a downstream client.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        // Present while re-sharing this remote project downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// Messages queued for the background task that relays repository state
/// to a downstream client.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
181
/// State kept while a local project is shared: the client connection, the
/// queue of pending updates, and the task draining that queue.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// A checkpoint of every repository in the store, keyed by each repository's
/// working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// The git status (and optional diff statistics) of a single path within a
/// repository.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    // Summarize an entry by its path (for ordering) and its status summary
    // (for aggregate queries over subtrees).
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}
254
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    // Entries are keyed (and deduplicated) by their repo-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Newtype identifier for a repository tracked by the [`GitStore`].
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
265
/// Details of an in-progress merge: the merge heads for each conflicted path,
/// and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}
271
/// Load state for a single commit's graph data.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// An immutable snapshot of a repository's observable state, cheap to clone
/// and diff against previous snapshots when syncing downstream.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    // Incremented with each rescan of the repository.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    pub linked_worktrees: Arc<[GitWorktree]>,
}
299
/// Identifier for a job queued on a [`Repository`].
type JobId = u64;
301
/// User-visible information about an active git job.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
307
/// Channel + task pair used to request per-commit graph data on demand.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}
312
/// Lifecycle of the commit-data handler channel.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
318
/// Incrementally-fetched commit graph data for one (source, order) query.
pub struct InitialGitGraphData {
    // Background task that populates `commit_data` as commits arrive.
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    // Index into `commit_data` for each commit OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
325
/// Borrowed view of the graph data loaded so far for a query.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
331
/// A single git repository tracked by the [`GitStore`]. Derefs to its
/// current [`RepositorySnapshot`].
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Jobs are executed serially through this channel.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Counter used to allocate the next JobId.
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily-resolved backend state (local git backend or remote client).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
351
// Expose the snapshot's fields/methods directly on `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
359
/// Backend state for a repository on the local machine.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    // Shell environment captured for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
366
impl LocalRepositoryState {
    /// Opens the git repository at `dot_git_abs_path`, resolving the working
    /// directory's shell environment first so that the `git` binary found on
    /// that environment's `PATH` is preferred.
    ///
    /// A failure to load the environment is logged and treated as empty; a
    /// failure to open the repository itself is returned as an error.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found on the repository environment's
                    // PATH; fall back to whatever is on the process PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
409
/// Backend state for a repository accessed through an upstream collab client.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
415
/// Resolved backend for a repository: a local git backend or a remote client.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
421
/// Progress events for an in-flight commit-graph load.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
428
/// Events emitted by a [`Repository`] when parts of its state change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
439
/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
442
/// Events emitted by the [`GitStore`].
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// Writing a new index text to disk failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
454
// Wire the event types above into gpui's event system.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
458
/// A unit of work queued on a repository's serial job channel.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // Optional key used to coalesce/deduplicate equivalent queued jobs.
    key: Option<GitJobKey>,
}
463
/// Keys identifying classes of jobs that can supersede one another.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
471
472impl GitStore {
    /// Creates a `GitStore` backed by repositories on the local machine.
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                // Repository ids start at 1.
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }
492
    /// Creates a `GitStore` that mirrors an upstream (remote) project.
    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: u64,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                downstream: None,
            },
            cx,
        )
    }
511
    /// Shared constructor: subscribes to the worktree and buffer stores (and,
    /// when available, the trusted-worktrees global) before building the store.
    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        let mut _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        // The trusted-worktrees global may not exist (e.g. in tests).
        if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
            _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
        }

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            worktree_ids: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }
540
    /// Registers all git-related RPC message handlers on `client`.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
588
    /// Whether this store runs against local repositories (vs. an upstream).
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
592 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
593 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
594 let id = repo.read(cx).id;
595 if self.active_repo_id != Some(id) {
596 self.active_repo_id = Some(id);
597 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
598 }
599 }
600 }
601
    /// Starts sharing this store's repositories with a downstream client.
    ///
    /// In remote mode, full snapshots are sent immediately and the client is
    /// recorded. In local mode, snapshots are queued and a background task
    /// drains the queue, diffing each new snapshot against the last one sent
    /// so only deltas go over the wire.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send an initial full update for every repository, split into
                // size-limited messages.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // `snapshots` tracks the last snapshot sent per repository so
                // later updates can be encoded as deltas.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send a delta.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send everything.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The send loop ended (channel closed or send error):
                        // drop the downstream state.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
682
683 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
684 match &mut self.state {
685 GitStoreState::Local {
686 downstream: downstream_client,
687 ..
688 } => {
689 downstream_client.take();
690 }
691 GitStoreState::Remote {
692 downstream: downstream_client,
693 ..
694 } => {
695 downstream_client.take();
696 }
697 }
698 self.shared_diffs.clear();
699 }
700
    /// Drops all diffs shared with `peer_id` (e.g. when the peer disconnects).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
704
705 pub fn active_repository(&self) -> Option<Entity<Repository>> {
706 self.active_repo_id
707 .as_ref()
708 .map(|id| self.repositories[id].clone())
709 }
710
711 pub fn open_unstaged_diff(
712 &mut self,
713 buffer: Entity<Buffer>,
714 cx: &mut Context<Self>,
715 ) -> Task<Result<Entity<BufferDiff>>> {
716 let buffer_id = buffer.read(cx).remote_id();
717 if let Some(diff_state) = self.diffs.get(&buffer_id)
718 && let Some(unstaged_diff) = diff_state
719 .read(cx)
720 .unstaged_diff
721 .as_ref()
722 .and_then(|weak| weak.upgrade())
723 {
724 if let Some(task) =
725 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
726 {
727 return cx.background_executor().spawn(async move {
728 task.await;
729 Ok(unstaged_diff)
730 });
731 }
732 return Task::ready(Ok(unstaged_diff));
733 }
734
735 let Some((repo, repo_path)) =
736 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
737 else {
738 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
739 };
740
741 let task = self
742 .loading_diffs
743 .entry((buffer_id, DiffKind::Unstaged))
744 .or_insert_with(|| {
745 let staged_text = repo.update(cx, |repo, cx| {
746 repo.load_staged_text(buffer_id, repo_path, cx)
747 });
748 cx.spawn(async move |this, cx| {
749 Self::open_diff_internal(
750 this,
751 DiffKind::Unstaged,
752 staged_text.await.map(DiffBasesChange::SetIndex),
753 buffer,
754 cx,
755 )
756 .await
757 .map_err(Arc::new)
758 })
759 .shared()
760 })
761 .clone();
762
763 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
764 }
765
    /// Returns a diff of `buffer` against the blob at commit `oid`
    /// (`None` diffs against an empty base), loading it if necessary.
    ///
    /// The resulting diff uses the buffer's unstaged diff as its secondary
    /// diff. Existing diffs are reused; concurrent loads are joined.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff for this OID already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Join an in-flight load for the same (buffer, oid) pair.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository (absent for None).
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Attach the unstaged diff as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        // Record the diff (and its base text) on the buffer's
                        // git state for later reuse.
                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
866
867 #[ztracing::instrument(skip_all)]
868 pub fn open_uncommitted_diff(
869 &mut self,
870 buffer: Entity<Buffer>,
871 cx: &mut Context<Self>,
872 ) -> Task<Result<Entity<BufferDiff>>> {
873 let buffer_id = buffer.read(cx).remote_id();
874
875 if let Some(diff_state) = self.diffs.get(&buffer_id)
876 && let Some(uncommitted_diff) = diff_state
877 .read(cx)
878 .uncommitted_diff
879 .as_ref()
880 .and_then(|weak| weak.upgrade())
881 {
882 if let Some(task) =
883 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
884 {
885 return cx.background_executor().spawn(async move {
886 task.await;
887 Ok(uncommitted_diff)
888 });
889 }
890 return Task::ready(Ok(uncommitted_diff));
891 }
892
893 let Some((repo, repo_path)) =
894 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
895 else {
896 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
897 };
898
899 let task = self
900 .loading_diffs
901 .entry((buffer_id, DiffKind::Uncommitted))
902 .or_insert_with(|| {
903 let changes = repo.update(cx, |repo, cx| {
904 repo.load_committed_text(buffer_id, repo_path, cx)
905 });
906
907 // todo(lw): hot foreground spawn
908 cx.spawn(async move |this, cx| {
909 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
910 .await
911 .map_err(Arc::new)
912 })
913 .shared()
914 })
915 .clone();
916
917 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
918 }
919
    /// Shared tail of `open_unstaged_diff`/`open_uncommitted_diff`: installs
    /// the loaded base texts into the buffer's git state, creates the
    /// [`BufferDiff`], and waits for the initial recalculation.
    ///
    /// On a load error, the pending entry in `loading_diffs` is cleared and
    /// the error returned. Not used for `DiffKind::SinceOid` diffs.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the in-flight marker so a retry can start fresh.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries the unstaged diff
                        // as its secondary diff, creating one if needed.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off recalculation with the new base texts, then wait
                // for it to complete before handing the diff back.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
994
    /// The buffer's unstaged diff, if one is already loaded and still alive.
    pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
        let diff_state = self.diffs.get(&buffer_id)?;
        diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
    }
999
    /// The buffer's uncommitted diff, if one is already loaded and still alive.
    pub fn get_uncommitted_diff(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Option<Entity<BufferDiff>> {
        let diff_state = self.diffs.get(&buffer_id)?;
        diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
    }
1008
    /// The buffer's diff against commit `oid`, if one is already loaded and
    /// still alive (`None` = diff against an empty base).
    pub fn get_diff_since_oid(
        &self,
        buffer_id: BufferId,
        oid: Option<git::Oid>,
        cx: &App,
    ) -> Option<Entity<BufferDiff>> {
        let diff_state = self.diffs.get(&buffer_id)?;
        diff_state.read(cx).oid_diff(oid)
    }
1018
1019 pub fn open_conflict_set(
1020 &mut self,
1021 buffer: Entity<Buffer>,
1022 cx: &mut Context<Self>,
1023 ) -> Entity<ConflictSet> {
1024 log::debug!("open conflict set");
1025 let buffer_id = buffer.read(cx).remote_id();
1026
1027 if let Some(git_state) = self.diffs.get(&buffer_id)
1028 && let Some(conflict_set) = git_state
1029 .read(cx)
1030 .conflict_set
1031 .as_ref()
1032 .and_then(|weak| weak.upgrade())
1033 {
1034 let conflict_set = conflict_set;
1035 let buffer_snapshot = buffer.read(cx).text_snapshot();
1036
1037 git_state.update(cx, |state, cx| {
1038 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1039 });
1040
1041 return conflict_set;
1042 }
1043
1044 let is_unmerged = self
1045 .repository_and_path_for_buffer_id(buffer_id, cx)
1046 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1047 let git_store = cx.weak_entity();
1048 let buffer_git_state = self
1049 .diffs
1050 .entry(buffer_id)
1051 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1052 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1053
1054 self._subscriptions
1055 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1056 cx.emit(GitStoreEvent::ConflictsUpdated);
1057 }));
1058
1059 buffer_git_state.update(cx, |state, cx| {
1060 state.conflict_set = Some(conflict_set.downgrade());
1061 let buffer_snapshot = buffer.read(cx).text_snapshot();
1062 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1063 });
1064
1065 conflict_set
1066 }
1067
1068 pub fn project_path_git_status(
1069 &self,
1070 project_path: &ProjectPath,
1071 cx: &App,
1072 ) -> Option<FileStatus> {
1073 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1074 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1075 }
1076
    /// Captures a checkpoint of every repository in the store.
    ///
    /// The returned [`GitStoreCheckpoint`] maps each repository's work
    /// directory to its per-repository checkpoint, so that it can later be
    /// passed to `restore_checkpoint` or `compare_checkpoints`.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // The `?` inside the `map` flattens the nested result produced
                // by `checkpoint()` into a single-level future for
                // `try_join_all` below.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            // Both vectors were pushed in lock-step above, so zipping pairs
            // each work directory with its own checkpoint.
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1097
1098 pub fn restore_checkpoint(
1099 &self,
1100 checkpoint: GitStoreCheckpoint,
1101 cx: &mut App,
1102 ) -> Task<Result<()>> {
1103 let repositories_by_work_dir_abs_path = self
1104 .repositories
1105 .values()
1106 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1107 .collect::<HashMap<_, _>>();
1108
1109 let mut tasks = Vec::new();
1110 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1111 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1112 let restore = repository.update(cx, |repository, _| {
1113 repository.restore_checkpoint(checkpoint)
1114 });
1115 tasks.push(async move { restore.await? });
1116 }
1117 }
1118 cx.background_spawn(async move {
1119 future::try_join_all(tasks).await?;
1120 Ok(())
1121 })
1122 }
1123
1124 /// Compares two checkpoints, returning true if they are equal.
1125 pub fn compare_checkpoints(
1126 &self,
1127 left: GitStoreCheckpoint,
1128 mut right: GitStoreCheckpoint,
1129 cx: &mut App,
1130 ) -> Task<Result<bool>> {
1131 let repositories_by_work_dir_abs_path = self
1132 .repositories
1133 .values()
1134 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1135 .collect::<HashMap<_, _>>();
1136
1137 let mut tasks = Vec::new();
1138 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1139 if let Some(right_checkpoint) = right
1140 .checkpoints_by_work_dir_abs_path
1141 .remove(&work_dir_abs_path)
1142 {
1143 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1144 {
1145 let compare = repository.update(cx, |repository, _| {
1146 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1147 });
1148
1149 tasks.push(async move { compare.await? });
1150 }
1151 } else {
1152 return Task::ready(Ok(false));
1153 }
1154 }
1155 cx.background_spawn(async move {
1156 Ok(future::try_join_all(tasks)
1157 .await?
1158 .into_iter()
1159 .all(|result| result))
1160 })
1161 }
1162
    /// Blames a buffer.
    ///
    /// If `version` is provided, the blame is computed for the buffer's
    /// content at that version; otherwise the current content is used. Local
    /// repositories invoke the git backend directly, while remote projects
    /// forward a `BlameBuffer` request upstream.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the content and metadata up front, since the blame runs
        // asynchronously below.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold the repository weakly so a pending blame doesn't keep it alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1209
1210 pub fn file_history(
1211 &self,
1212 repo: &Entity<Repository>,
1213 path: RepoPath,
1214 cx: &mut App,
1215 ) -> Task<Result<git::repository::FileHistory>> {
1216 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1217
1218 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1219 }
1220
1221 pub fn file_history_paginated(
1222 &self,
1223 repo: &Entity<Repository>,
1224 path: RepoPath,
1225 skip: usize,
1226 limit: Option<usize>,
1227 cx: &mut App,
1228 ) -> Task<Result<git::repository::FileHistory>> {
1229 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1230
1231 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1232 }
1233
    /// Builds a permalink URL to the given line range of a buffer on its git
    /// hosting provider.
    ///
    /// For buffers outside any git repository, falls back to constructing a
    /// permalink from Cargo registry metadata when the buffer is a Rust
    /// source file.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Local repository: resolve the remote URL and HEAD SHA,
                    // then let the matching hosting provider format the link.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Remote repository: ask the upstream host to build the
                    // permalink for us.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1318
1319 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1320 match &self.state {
1321 GitStoreState::Local {
1322 downstream: downstream_client,
1323 ..
1324 } => downstream_client
1325 .as_ref()
1326 .map(|state| (state.client.clone(), state.project_id)),
1327 GitStoreState::Remote {
1328 downstream: downstream_client,
1329 ..
1330 } => downstream_client.clone(),
1331 }
1332 }
1333
1334 fn upstream_client(&self) -> Option<AnyProtoClient> {
1335 match &self.state {
1336 GitStoreState::Local { .. } => None,
1337 GitStoreState::Remote {
1338 upstream_client, ..
1339 } => Some(upstream_client.clone()),
1340 }
1341 }
1342
    /// Handles worktree-store events: propagates entry changes to the owning
    /// repositories, (re)builds repositories discovered in a worktree, and
    /// drops repositories whose last worktree was removed.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        // Only local stores own repositories directly; remote stores receive
        // repository state through proto updates instead.
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Bucket changed paths by the repository containing them,
                    // then notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees don't contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Detach the removed worktree from every repository, collecting
                // repositories that no longer belong to any worktree.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository went away, fall back to any
                // remaining repository (or none at all).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to a single repository's update event: refreshes conflict
    /// tracking for every open buffer belonging to that repository, then
    /// rebroadcasts the event as [`GitStoreEvent::RepositoryUpdated`].
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only buffers that resolve to this repository are affected.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Sync the conflict flag with the repository's current
                        // status; reparse markers only when it actually flips.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1488
    /// Rebroadcasts a repository's `JobsUpdated` notification as a store-level
    /// event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1492
1493 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1494 fn update_repositories_from_worktree(
1495 &mut self,
1496 worktree_id: WorktreeId,
1497 project_environment: Entity<ProjectEnvironment>,
1498 next_repository_id: Arc<AtomicU64>,
1499 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1500 updated_git_repositories: UpdatedGitRepositoriesSet,
1501 fs: Arc<dyn Fs>,
1502 cx: &mut Context<Self>,
1503 ) {
1504 let mut removed_ids = Vec::new();
1505 for update in updated_git_repositories.iter() {
1506 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1507 let existing_work_directory_abs_path =
1508 repo.read(cx).work_directory_abs_path.clone();
1509 Some(&existing_work_directory_abs_path)
1510 == update.old_work_directory_abs_path.as_ref()
1511 || Some(&existing_work_directory_abs_path)
1512 == update.new_work_directory_abs_path.as_ref()
1513 }) {
1514 let repo_id = *id;
1515 if let Some(new_work_directory_abs_path) =
1516 update.new_work_directory_abs_path.clone()
1517 {
1518 self.worktree_ids
1519 .entry(repo_id)
1520 .or_insert_with(HashSet::new)
1521 .insert(worktree_id);
1522 existing.update(cx, |existing, cx| {
1523 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1524 existing.schedule_scan(updates_tx.clone(), cx);
1525 });
1526 } else {
1527 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1528 worktree_ids.remove(&worktree_id);
1529 if worktree_ids.is_empty() {
1530 removed_ids.push(repo_id);
1531 }
1532 }
1533 }
1534 } else if let UpdatedGitRepository {
1535 new_work_directory_abs_path: Some(work_directory_abs_path),
1536 dot_git_abs_path: Some(dot_git_abs_path),
1537 repository_dir_abs_path: Some(repository_dir_abs_path),
1538 common_dir_abs_path: Some(common_dir_abs_path),
1539 ..
1540 } = update
1541 {
1542 let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
1543 work_directory_abs_path,
1544 common_dir_abs_path,
1545 repository_dir_abs_path,
1546 )
1547 .into();
1548 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1549 let is_trusted = TrustedWorktrees::try_get_global(cx)
1550 .map(|trusted_worktrees| {
1551 trusted_worktrees.update(cx, |trusted_worktrees, cx| {
1552 trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
1553 })
1554 })
1555 .unwrap_or(false);
1556 let git_store = cx.weak_entity();
1557 let repo = cx.new(|cx| {
1558 let mut repo = Repository::local(
1559 id,
1560 work_directory_abs_path.clone(),
1561 original_repo_abs_path.clone(),
1562 dot_git_abs_path.clone(),
1563 project_environment.downgrade(),
1564 fs.clone(),
1565 is_trusted,
1566 git_store,
1567 cx,
1568 );
1569 if let Some(updates_tx) = updates_tx.as_ref() {
1570 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1571 updates_tx
1572 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1573 .ok();
1574 }
1575 repo.schedule_scan(updates_tx.clone(), cx);
1576 repo
1577 });
1578 self._subscriptions
1579 .push(cx.subscribe(&repo, Self::on_repository_event));
1580 self._subscriptions
1581 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1582 self.repositories.insert(id, repo);
1583 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1584 cx.emit(GitStoreEvent::RepositoryAdded);
1585 self.active_repo_id.get_or_insert_with(|| {
1586 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1587 id
1588 });
1589 }
1590 }
1591
1592 for id in removed_ids {
1593 if self.active_repo_id == Some(id) {
1594 self.active_repo_id = None;
1595 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1596 }
1597 self.repositories.remove(&id);
1598 if let Some(updates_tx) = updates_tx.as_ref() {
1599 updates_tx
1600 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1601 .ok();
1602 }
1603 }
1604 }
1605
1606 fn on_trusted_worktrees_event(
1607 &mut self,
1608 _: Entity<TrustedWorktreesStore>,
1609 event: &TrustedWorktreesEvent,
1610 cx: &mut Context<Self>,
1611 ) {
1612 if !matches!(self.state, GitStoreState::Local { .. }) {
1613 return;
1614 }
1615
1616 let (is_trusted, event_paths) = match event {
1617 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1618 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1619 };
1620
1621 for (repo_id, worktree_ids) in &self.worktree_ids {
1622 if worktree_ids
1623 .iter()
1624 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1625 {
1626 if let Some(repo) = self.repositories.get(repo_id) {
1627 let repository_state = repo.read(cx).repository_state.clone();
1628 cx.background_spawn(async move {
1629 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1630 state.backend.set_trusted(is_trusted);
1631 }
1632 })
1633 .detach();
1634 }
1635 }
1636 }
1637 }
1638
    /// Keeps per-buffer git state in sync with buffer-store lifecycle events:
    /// language changes, buffer drops, shared-buffer closure, and file-path
    /// changes.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch each new buffer so diffs can be re-highlighted when
                // its language changes.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // A remote peer closed its view of the buffer; drop the diff
                // shared with that peer only.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // The buffer is gone entirely; release all associated state.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Best-effort: failures are logged, not surfaced.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1711
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all of the work
    /// has completed. Buffers with no tracked git state are skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` yields a future only when a
                    // recalculation is actually pending.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1737
    /// Responds to hunks being staged or unstaged in a buffer diff by writing
    /// the new index text to the buffer's repository. On failure, the diff's
    /// pending hunks are cleared and an `IndexWriteError` event is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the operation counter so stale index writes can be
                // distinguished from the most recent one.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    // Hold the diff weakly while the write is in flight.
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1777
1778 fn local_worktree_git_repos_changed(
1779 &mut self,
1780 worktree: Entity<Worktree>,
1781 changed_repos: &UpdatedGitRepositoriesSet,
1782 cx: &mut Context<Self>,
1783 ) {
1784 log::debug!("local worktree repos changed");
1785 debug_assert!(worktree.read(cx).is_local());
1786
1787 for repository in self.repositories.values() {
1788 repository.update(cx, |repository, cx| {
1789 let repo_abs_path = &repository.work_directory_abs_path;
1790 if changed_repos.iter().any(|update| {
1791 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1792 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1793 }) {
1794 repository.reload_buffer_diff_bases(cx);
1795 }
1796 });
1797 }
1798 }
1799
    /// Returns all repositories tracked by this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1803
1804 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1805 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1806 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1807 Some(status.status)
1808 }
1809
1810 pub fn repository_and_path_for_buffer_id(
1811 &self,
1812 buffer_id: BufferId,
1813 cx: &App,
1814 ) -> Option<(Entity<Repository>, RepoPath)> {
1815 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1816 let project_path = buffer.read(cx).project_path(cx)?;
1817 self.repository_and_path_for_project_path(&project_path, cx)
1818 }
1819
1820 pub fn repository_and_path_for_project_path(
1821 &self,
1822 path: &ProjectPath,
1823 cx: &App,
1824 ) -> Option<(Entity<Repository>, RepoPath)> {
1825 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1826 self.repositories
1827 .values()
1828 .filter_map(|repo| {
1829 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1830 Some((repo.clone(), repo_path))
1831 })
1832 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1833 }
1834
1835 pub fn git_init(
1836 &self,
1837 path: Arc<Path>,
1838 fallback_branch_name: String,
1839 cx: &App,
1840 ) -> Task<Result<()>> {
1841 match &self.state {
1842 GitStoreState::Local { fs, .. } => {
1843 let fs = fs.clone();
1844 cx.background_executor()
1845 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1846 }
1847 GitStoreState::Remote {
1848 upstream_client,
1849 upstream_project_id: project_id,
1850 ..
1851 } => {
1852 let client = upstream_client.clone();
1853 let project_id = *project_id;
1854 cx.background_executor().spawn(async move {
1855 client
1856 .request(proto::GitInit {
1857 project_id: project_id,
1858 abs_path: path.to_string_lossy().into_owned(),
1859 fallback_branch_name,
1860 })
1861 .await?;
1862 Ok(())
1863 })
1864 }
1865 }
1866 }
1867
1868 pub fn git_clone(
1869 &self,
1870 repo: String,
1871 path: impl Into<Arc<std::path::Path>>,
1872 cx: &App,
1873 ) -> Task<Result<()>> {
1874 let path = path.into();
1875 match &self.state {
1876 GitStoreState::Local { fs, .. } => {
1877 let fs = fs.clone();
1878 cx.background_executor()
1879 .spawn(async move { fs.git_clone(&repo, &path).await })
1880 }
1881 GitStoreState::Remote {
1882 upstream_client,
1883 upstream_project_id,
1884 ..
1885 } => {
1886 if upstream_client.is_via_collab() {
1887 return Task::ready(Err(anyhow!(
1888 "Git Clone isn't supported for project guests"
1889 )));
1890 }
1891 let request = upstream_client.request(proto::GitClone {
1892 project_id: *upstream_project_id,
1893 abs_path: path.to_string_lossy().into_owned(),
1894 remote_repo: repo,
1895 });
1896
1897 cx.background_spawn(async move {
1898 let result = request.await?;
1899
1900 match result.success {
1901 true => Ok(()),
1902 false => Err(anyhow!("Git Clone failed")),
1903 }
1904 })
1905 }
1906 }
1907 }
1908
    /// Handles an `UpdateRepository` message from upstream: creates the remote
    /// repository entry on first sight, applies the update, and re-forwards it
    /// downstream when this project is itself being shared.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Lazily create the repository entry the first time we see this
            // id, subscribing to its events exactly once.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward to our own downstream peers, rewriting the project id
            // to ours.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1964
    /// Handles a `RemoveRepository` message from upstream: drops the local
    /// repository entry, re-forwards the message downstream if this project is
    /// being shared, and clears the active repository if it was the one
    /// removed.
    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        // NOTE(review): the result of this `update` is discarded, unlike in
        // `handle_update_repository` where it is propagated — confirm this is
        // intentional.
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        });
        Ok(())
    }
1986
    /// Handles a `GitInit` request from a remote peer by initializing a
    /// repository at the requested path on its behalf.
    async fn handle_git_init(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitInit>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let name = envelope.payload.fallback_branch_name;
        cx.update(|cx| this.read(cx).git_init(path, name, cx))
            .await?;

        Ok(proto::Ack {})
    }
1999
    /// Handles a `GitClone` request from a remote peer, reporting the outcome
    /// as a success flag rather than an error.
    async fn handle_git_clone(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitClone>,
        cx: AsyncApp,
    ) -> Result<proto::GitCloneResponse> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let repo_name = envelope.payload.remote_repo;
        let result = cx
            .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
            .await;

        Ok(proto::GitCloneResponse {
            success: result.is_ok(),
        })
    }
2015
    /// Handles a `Fetch` request from a remote peer, wiring up an askpass
    /// delegate so authentication prompts round-trip back to the requester.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        // Relay git's raw output back to the requester.
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2045
    /// Handles a `Push` request from a remote peer, wiring up an askpass
    /// delegate so authentication prompts round-trip back to the requester.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // Translate the optional proto push option into the repository-level
        // enum.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        // Relay git's raw output back to the requester.
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2093
    /// Handles a `proto::Pull` RPC: pulls from the given remote into the
    /// requested repository, relaying any askpass (credential) prompts back
    /// to the requesting client via the shared `askpass_id`.
    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        // Route credential prompts back to the peer that issued this request.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `branch_name` is optional; when absent the choice of what to pull
        // is delegated to `Repository::pull`.
        let branch_name = envelope.payload.branch_name.map(|name| name.into());
        let remote_name = envelope.payload.remote_name.into();
        let rebase = envelope.payload.rebase;

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }
2125
2126 async fn handle_stage(
2127 this: Entity<Self>,
2128 envelope: TypedEnvelope<proto::Stage>,
2129 mut cx: AsyncApp,
2130 ) -> Result<proto::Ack> {
2131 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2132 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2133
2134 let entries = envelope
2135 .payload
2136 .paths
2137 .into_iter()
2138 .map(|path| RepoPath::new(&path))
2139 .collect::<Result<Vec<_>>>()?;
2140
2141 repository_handle
2142 .update(&mut cx, |repository_handle, cx| {
2143 repository_handle.stage_entries(entries, cx)
2144 })
2145 .await?;
2146 Ok(proto::Ack {})
2147 }
2148
2149 async fn handle_unstage(
2150 this: Entity<Self>,
2151 envelope: TypedEnvelope<proto::Unstage>,
2152 mut cx: AsyncApp,
2153 ) -> Result<proto::Ack> {
2154 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2155 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2156
2157 let entries = envelope
2158 .payload
2159 .paths
2160 .into_iter()
2161 .map(|path| RepoPath::new(&path))
2162 .collect::<Result<Vec<_>>>()?;
2163
2164 repository_handle
2165 .update(&mut cx, |repository_handle, cx| {
2166 repository_handle.unstage_entries(entries, cx)
2167 })
2168 .await?;
2169
2170 Ok(proto::Ack {})
2171 }
2172
2173 async fn handle_stash(
2174 this: Entity<Self>,
2175 envelope: TypedEnvelope<proto::Stash>,
2176 mut cx: AsyncApp,
2177 ) -> Result<proto::Ack> {
2178 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2179 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2180
2181 let entries = envelope
2182 .payload
2183 .paths
2184 .into_iter()
2185 .map(|path| RepoPath::new(&path))
2186 .collect::<Result<Vec<_>>>()?;
2187
2188 repository_handle
2189 .update(&mut cx, |repository_handle, cx| {
2190 repository_handle.stash_entries(entries, cx)
2191 })
2192 .await?;
2193
2194 Ok(proto::Ack {})
2195 }
2196
2197 async fn handle_stash_pop(
2198 this: Entity<Self>,
2199 envelope: TypedEnvelope<proto::StashPop>,
2200 mut cx: AsyncApp,
2201 ) -> Result<proto::Ack> {
2202 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2203 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2204 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2205
2206 repository_handle
2207 .update(&mut cx, |repository_handle, cx| {
2208 repository_handle.stash_pop(stash_index, cx)
2209 })
2210 .await?;
2211
2212 Ok(proto::Ack {})
2213 }
2214
2215 async fn handle_stash_apply(
2216 this: Entity<Self>,
2217 envelope: TypedEnvelope<proto::StashApply>,
2218 mut cx: AsyncApp,
2219 ) -> Result<proto::Ack> {
2220 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2221 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2222 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2223
2224 repository_handle
2225 .update(&mut cx, |repository_handle, cx| {
2226 repository_handle.stash_apply(stash_index, cx)
2227 })
2228 .await?;
2229
2230 Ok(proto::Ack {})
2231 }
2232
    /// Handles a `proto::StashDrop` RPC: drops the stash entry at the given
    /// index, or the default entry when no index is supplied.
    async fn handle_stash_drop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashDrop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_drop(stash_index, cx)
            })
            // NOTE(review): `stash_pop`/`stash_apply` above await with a
            // single `?` while this uses `??` — presumably `stash_drop`
            // returns a nested Result; confirm against `Repository::stash_drop`.
            .await??;

        Ok(proto::Ack {})
    }
2250
    /// Handles a `proto::SetIndexText` RPC: replaces the index (staged)
    /// content of a single file in the requested repository with the text
    /// carried in the payload.
    async fn handle_set_index_text(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SetIndexText>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let repo_path = RepoPath::from_proto(&envelope.payload.path)?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                // NOTE(review): the meaning of the third (`None`) argument is
                // not visible here — confirm against
                // `Repository::spawn_set_index_text_job`.
                repository_handle.spawn_set_index_text_job(
                    repo_path,
                    envelope.payload.text,
                    None,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2272
    /// Handles a `proto::RunGitHook` RPC: decodes the requested hook and runs
    /// it in the requested repository, failing early with "invalid hook" if
    /// the payload doesn't decode.
    async fn handle_run_hook(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RunGitHook>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.run_hook(hook, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2288
    /// Handles a `proto::Commit` RPC: creates a commit in the requested
    /// repository with the given message and options, relaying any askpass
    /// prompts raised during the commit back to the requesting client.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        // Route credential prompts back to the peer that issued this request.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    // `zip`: the author override is applied only when BOTH
                    // name and email were provided.
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2327
2328 async fn handle_get_remotes(
2329 this: Entity<Self>,
2330 envelope: TypedEnvelope<proto::GetRemotes>,
2331 mut cx: AsyncApp,
2332 ) -> Result<proto::GetRemotesResponse> {
2333 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2334 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2335
2336 let branch_name = envelope.payload.branch_name;
2337 let is_push = envelope.payload.is_push;
2338
2339 let remotes = repository_handle
2340 .update(&mut cx, |repository_handle, _| {
2341 repository_handle.get_remotes(branch_name, is_push)
2342 })
2343 .await??;
2344
2345 Ok(proto::GetRemotesResponse {
2346 remotes: remotes
2347 .into_iter()
2348 .map(|remotes| proto::get_remotes_response::Remote {
2349 name: remotes.name.to_string(),
2350 })
2351 .collect::<Vec<_>>(),
2352 })
2353 }
2354
2355 async fn handle_get_worktrees(
2356 this: Entity<Self>,
2357 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2358 mut cx: AsyncApp,
2359 ) -> Result<proto::GitWorktreesResponse> {
2360 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2361 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2362
2363 let worktrees = repository_handle
2364 .update(&mut cx, |repository_handle, _| {
2365 repository_handle.worktrees()
2366 })
2367 .await??;
2368
2369 Ok(proto::GitWorktreesResponse {
2370 worktrees: worktrees
2371 .into_iter()
2372 .map(|worktree| worktree_to_proto(&worktree))
2373 .collect::<Vec<_>>(),
2374 })
2375 }
2376
2377 async fn handle_create_worktree(
2378 this: Entity<Self>,
2379 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2380 mut cx: AsyncApp,
2381 ) -> Result<proto::Ack> {
2382 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2383 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2384 let directory = PathBuf::from(envelope.payload.directory);
2385 let name = envelope.payload.name;
2386 let commit = envelope.payload.commit;
2387
2388 repository_handle
2389 .update(&mut cx, |repository_handle, _| {
2390 repository_handle.create_worktree(name, directory, commit)
2391 })
2392 .await??;
2393
2394 Ok(proto::Ack {})
2395 }
2396
2397 async fn handle_remove_worktree(
2398 this: Entity<Self>,
2399 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2400 mut cx: AsyncApp,
2401 ) -> Result<proto::Ack> {
2402 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2403 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2404 let path = PathBuf::from(envelope.payload.path);
2405 let force = envelope.payload.force;
2406
2407 repository_handle
2408 .update(&mut cx, |repository_handle, _| {
2409 repository_handle.remove_worktree(path, force)
2410 })
2411 .await??;
2412
2413 Ok(proto::Ack {})
2414 }
2415
2416 async fn handle_rename_worktree(
2417 this: Entity<Self>,
2418 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2419 mut cx: AsyncApp,
2420 ) -> Result<proto::Ack> {
2421 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2422 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2423 let old_path = PathBuf::from(envelope.payload.old_path);
2424 let new_path = PathBuf::from(envelope.payload.new_path);
2425
2426 repository_handle
2427 .update(&mut cx, |repository_handle, _| {
2428 repository_handle.rename_worktree(old_path, new_path)
2429 })
2430 .await??;
2431
2432 Ok(proto::Ack {})
2433 }
2434
2435 async fn handle_get_branches(
2436 this: Entity<Self>,
2437 envelope: TypedEnvelope<proto::GitGetBranches>,
2438 mut cx: AsyncApp,
2439 ) -> Result<proto::GitBranchesResponse> {
2440 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2441 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2442
2443 let branches = repository_handle
2444 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2445 .await??;
2446
2447 Ok(proto::GitBranchesResponse {
2448 branches: branches
2449 .into_iter()
2450 .map(|branch| branch_to_proto(&branch))
2451 .collect::<Vec<_>>(),
2452 })
2453 }
    /// Handles a `proto::GetDefaultBranch` RPC: returns the repository's
    /// default branch, or `None` when there isn't one.
    async fn handle_get_default_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDefaultBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetDefaultBranchResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch = repository_handle
            .update(&mut cx, |repository_handle, _| {
                // NOTE(review): the meaning of the `false` flag is not
                // visible here — confirm against `Repository::default_branch`.
                repository_handle.default_branch(false)
            })
            .await??
            .map(Into::into);

        Ok(proto::GetDefaultBranchResponse { branch })
    }
2471 async fn handle_create_branch(
2472 this: Entity<Self>,
2473 envelope: TypedEnvelope<proto::GitCreateBranch>,
2474 mut cx: AsyncApp,
2475 ) -> Result<proto::Ack> {
2476 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2477 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2478 let branch_name = envelope.payload.branch_name;
2479
2480 repository_handle
2481 .update(&mut cx, |repository_handle, _| {
2482 repository_handle.create_branch(branch_name, None)
2483 })
2484 .await??;
2485
2486 Ok(proto::Ack {})
2487 }
2488
2489 async fn handle_change_branch(
2490 this: Entity<Self>,
2491 envelope: TypedEnvelope<proto::GitChangeBranch>,
2492 mut cx: AsyncApp,
2493 ) -> Result<proto::Ack> {
2494 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2495 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2496 let branch_name = envelope.payload.branch_name;
2497
2498 repository_handle
2499 .update(&mut cx, |repository_handle, _| {
2500 repository_handle.change_branch(branch_name)
2501 })
2502 .await??;
2503
2504 Ok(proto::Ack {})
2505 }
2506
2507 async fn handle_rename_branch(
2508 this: Entity<Self>,
2509 envelope: TypedEnvelope<proto::GitRenameBranch>,
2510 mut cx: AsyncApp,
2511 ) -> Result<proto::Ack> {
2512 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2513 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2514 let branch = envelope.payload.branch;
2515 let new_name = envelope.payload.new_name;
2516
2517 repository_handle
2518 .update(&mut cx, |repository_handle, _| {
2519 repository_handle.rename_branch(branch, new_name)
2520 })
2521 .await??;
2522
2523 Ok(proto::Ack {})
2524 }
2525
2526 async fn handle_create_remote(
2527 this: Entity<Self>,
2528 envelope: TypedEnvelope<proto::GitCreateRemote>,
2529 mut cx: AsyncApp,
2530 ) -> Result<proto::Ack> {
2531 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2532 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2533 let remote_name = envelope.payload.remote_name;
2534 let remote_url = envelope.payload.remote_url;
2535
2536 repository_handle
2537 .update(&mut cx, |repository_handle, _| {
2538 repository_handle.create_remote(remote_name, remote_url)
2539 })
2540 .await??;
2541
2542 Ok(proto::Ack {})
2543 }
2544
2545 async fn handle_delete_branch(
2546 this: Entity<Self>,
2547 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2548 mut cx: AsyncApp,
2549 ) -> Result<proto::Ack> {
2550 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2551 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2552 let is_remote = envelope.payload.is_remote;
2553 let branch_name = envelope.payload.branch_name;
2554
2555 repository_handle
2556 .update(&mut cx, |repository_handle, _| {
2557 repository_handle.delete_branch(is_remote, branch_name)
2558 })
2559 .await??;
2560
2561 Ok(proto::Ack {})
2562 }
2563
2564 async fn handle_remove_remote(
2565 this: Entity<Self>,
2566 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2567 mut cx: AsyncApp,
2568 ) -> Result<proto::Ack> {
2569 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2570 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2571 let remote_name = envelope.payload.remote_name;
2572
2573 repository_handle
2574 .update(&mut cx, |repository_handle, _| {
2575 repository_handle.remove_remote(remote_name)
2576 })
2577 .await??;
2578
2579 Ok(proto::Ack {})
2580 }
2581
2582 async fn handle_show(
2583 this: Entity<Self>,
2584 envelope: TypedEnvelope<proto::GitShow>,
2585 mut cx: AsyncApp,
2586 ) -> Result<proto::GitCommitDetails> {
2587 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2588 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2589
2590 let commit = repository_handle
2591 .update(&mut cx, |repository_handle, _| {
2592 repository_handle.show(envelope.payload.commit)
2593 })
2594 .await??;
2595 Ok(proto::GitCommitDetails {
2596 sha: commit.sha.into(),
2597 message: commit.message.into(),
2598 commit_timestamp: commit.commit_timestamp,
2599 author_email: commit.author_email.into(),
2600 author_name: commit.author_name.into(),
2601 })
2602 }
2603
2604 async fn handle_load_commit_diff(
2605 this: Entity<Self>,
2606 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2607 mut cx: AsyncApp,
2608 ) -> Result<proto::LoadCommitDiffResponse> {
2609 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2610 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2611
2612 let commit_diff = repository_handle
2613 .update(&mut cx, |repository_handle, _| {
2614 repository_handle.load_commit_diff(envelope.payload.commit)
2615 })
2616 .await??;
2617 Ok(proto::LoadCommitDiffResponse {
2618 files: commit_diff
2619 .files
2620 .into_iter()
2621 .map(|file| proto::CommitFile {
2622 path: file.path.to_proto(),
2623 old_text: file.old_text,
2624 new_text: file.new_text,
2625 is_binary: file.is_binary,
2626 })
2627 .collect(),
2628 })
2629 }
2630
2631 async fn handle_file_history(
2632 this: Entity<Self>,
2633 envelope: TypedEnvelope<proto::GitFileHistory>,
2634 mut cx: AsyncApp,
2635 ) -> Result<proto::GitFileHistoryResponse> {
2636 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2637 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2638 let path = RepoPath::from_proto(&envelope.payload.path)?;
2639 let skip = envelope.payload.skip as usize;
2640 let limit = envelope.payload.limit.map(|l| l as usize);
2641
2642 let file_history = repository_handle
2643 .update(&mut cx, |repository_handle, _| {
2644 repository_handle.file_history_paginated(path, skip, limit)
2645 })
2646 .await??;
2647
2648 Ok(proto::GitFileHistoryResponse {
2649 entries: file_history
2650 .entries
2651 .into_iter()
2652 .map(|entry| proto::FileHistoryEntry {
2653 sha: entry.sha.to_string(),
2654 subject: entry.subject.to_string(),
2655 message: entry.message.to_string(),
2656 commit_timestamp: entry.commit_timestamp,
2657 author_name: entry.author_name.to_string(),
2658 author_email: entry.author_email.to_string(),
2659 })
2660 .collect(),
2661 path: file_history.path.to_proto(),
2662 })
2663 }
2664
2665 async fn handle_reset(
2666 this: Entity<Self>,
2667 envelope: TypedEnvelope<proto::GitReset>,
2668 mut cx: AsyncApp,
2669 ) -> Result<proto::Ack> {
2670 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2671 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2672
2673 let mode = match envelope.payload.mode() {
2674 git_reset::ResetMode::Soft => ResetMode::Soft,
2675 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2676 };
2677
2678 repository_handle
2679 .update(&mut cx, |repository_handle, cx| {
2680 repository_handle.reset(envelope.payload.commit, mode, cx)
2681 })
2682 .await??;
2683 Ok(proto::Ack {})
2684 }
2685
2686 async fn handle_checkout_files(
2687 this: Entity<Self>,
2688 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2689 mut cx: AsyncApp,
2690 ) -> Result<proto::Ack> {
2691 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2692 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2693 let paths = envelope
2694 .payload
2695 .paths
2696 .iter()
2697 .map(|s| RepoPath::from_proto(s))
2698 .collect::<Result<Vec<_>>>()?;
2699
2700 repository_handle
2701 .update(&mut cx, |repository_handle, cx| {
2702 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2703 })
2704 .await?;
2705 Ok(proto::Ack {})
2706 }
2707
    /// Handles a `proto::OpenCommitMessageBuffer` RPC: opens the repository's
    /// commit-message buffer, replicates it to the requesting peer, and
    /// returns the buffer's remote id so the peer can bind to it.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                // Share the buffer with the originating peer; when the
                // request was forwarded, credit the original sender rather
                // than the forwarding peer.
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2738
    /// Handles a `proto::AskPassRequest` RPC: forwards a git credential
    /// prompt to the askpass delegate registered under `askpass_id` and
    /// returns the user's answer.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // The delegate is taken out of the shared map while we await the
        // user's response, and reinserted below so that subsequent prompts
        // for the same operation can find it again.
        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2767
2768 async fn handle_check_for_pushed_commits(
2769 this: Entity<Self>,
2770 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2771 mut cx: AsyncApp,
2772 ) -> Result<proto::CheckForPushedCommitsResponse> {
2773 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2774 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2775
2776 let branches = repository_handle
2777 .update(&mut cx, |repository_handle, _| {
2778 repository_handle.check_for_pushed_commits()
2779 })
2780 .await??;
2781 Ok(proto::CheckForPushedCommitsResponse {
2782 pushed_to: branches
2783 .into_iter()
2784 .map(|commit| commit.to_string())
2785 .collect(),
2786 })
2787 }
2788
2789 async fn handle_git_diff(
2790 this: Entity<Self>,
2791 envelope: TypedEnvelope<proto::GitDiff>,
2792 mut cx: AsyncApp,
2793 ) -> Result<proto::GitDiffResponse> {
2794 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2795 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2796 let diff_type = match envelope.payload.diff_type() {
2797 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2798 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2799 proto::git_diff::DiffType::MergeBase => {
2800 let base_ref = envelope
2801 .payload
2802 .merge_base_ref
2803 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2804 DiffType::MergeBase {
2805 base_ref: base_ref.into(),
2806 }
2807 }
2808 };
2809
2810 let mut diff = repository_handle
2811 .update(&mut cx, |repository_handle, cx| {
2812 repository_handle.diff(diff_type, cx)
2813 })
2814 .await??;
2815 const ONE_MB: usize = 1_000_000;
2816 if diff.len() > ONE_MB {
2817 diff = diff.chars().take(ONE_MB).collect()
2818 }
2819
2820 Ok(proto::GitDiffResponse { diff })
2821 }
2822
    /// Handles a `proto::GetTreeDiff` RPC: diffs two tree-ishes (`base` and
    /// `head`) — relative to their merge base when `is_merge` is set — and
    /// returns a per-path status list.
    async fn handle_tree_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetTreeDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId(request.payload.repository_id);
        let diff_type = if request.payload.is_merge {
            DiffTreeType::MergeBase {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        } else {
            DiffTreeType::Since {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        };

        let diff = this
            .update(&mut cx, |this, cx| {
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
            })
            .context("missing repository")?
            .await??;

        Ok(proto::GetTreeDiffResponse {
            entries: diff
                .entries
                .into_iter()
                .map(|(path, status)| proto::TreeDiffStatus {
                    path: path.as_ref().to_proto(),
                    // `status` is matched twice: first for the status code,
                    // then for the pre-change blob oid (absent for additions).
                    status: match status {
                        TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
                        TreeDiffStatus::Modified { .. } => {
                            proto::tree_diff_status::Status::Modified.into()
                        }
                        TreeDiffStatus::Deleted { .. } => {
                            proto::tree_diff_status::Status::Deleted.into()
                        }
                    },
                    oid: match status {
                        TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
                            Some(old.to_string())
                        }
                        TreeDiffStatus::Added => None,
                    },
                })
                .collect(),
        })
    }
2874
2875 async fn handle_get_blob_content(
2876 this: Entity<Self>,
2877 request: TypedEnvelope<proto::GetBlobContent>,
2878 mut cx: AsyncApp,
2879 ) -> Result<proto::GetBlobContentResponse> {
2880 let oid = git::Oid::from_str(&request.payload.oid)?;
2881 let repository_id = RepositoryId(request.payload.repository_id);
2882 let content = this
2883 .update(&mut cx, |this, cx| {
2884 let repository = this.repositories().get(&repository_id)?;
2885 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2886 })
2887 .context("missing repository")?
2888 .await?;
2889 Ok(proto::GetBlobContentResponse { content })
2890 }
2891
    /// Handles a `proto::OpenUnstagedDiff` RPC: opens the index→worktree diff
    /// for a buffer, records it as shared with the requesting peer, and
    /// returns the staged (index) base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Remember that this peer holds the diff so later base-text changes
        // are pushed to them; credit the original sender for forwarded
        // requests.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2915
    /// Handles a `proto::OpenUncommittedDiff` RPC: opens the HEAD→worktree
    /// diff for a buffer, records it as shared with the requesting peer, and
    /// returns the committed and staged base texts, deduplicated via `mode`.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Remember that this peer holds the diff so later base-text changes
        // are pushed to them; credit the original sender for forwarded
        // requests.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary (unstaged) diff's base text is the index content,
            // when it exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            // Avoid sending the same text twice: when the index snapshot is
            // the same buffer as the committed snapshot, send only the
            // committed text and signal `IndexMatchesHead`.
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base (e.g. the file is not in HEAD): send the
                // index text alone, if there is one.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2976
    /// Handles a `proto::UpdateDiffBases` message: applies updated
    /// staged/committed base texts to the local diff state of a buffer.
    /// Buffers with no diff state, or that are no longer open, are silently
    /// ignored.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
2995
    /// Handles a `proto::BlameBuffer` RPC: waits until the local copy of the
    /// buffer has caught up to the requester's version, then computes git
    /// blame for it at that version.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Blaming a version we haven't received yet would produce stale
        // results, so block until the buffer catches up.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3018
    /// Handles a `proto::GetPermalinkToLine` RPC: builds a permalink URL to
    /// the selected row range of the buffer's file on its git hosting
    /// provider.
    ///
    /// NOTE(review): unlike `handle_blame_buffer`, this does not wait for the
    /// requester's buffer version before resolving the selection — confirm
    /// whether that is intentional.
    async fn handle_get_permalink_to_line(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPermalinkToLine>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetPermalinkToLineResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let selection = {
            let proto_selection = envelope
                .payload
                .selection
                .context("no selection to get permalink for defined")?;
            proto_selection.start as u32..proto_selection.end as u32
        };
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        let permalink = this
            .update(&mut cx, |this, cx| {
                this.get_permalink_to_line(&buffer, selection, cx)
            })
            .await?;
        Ok(proto::GetPermalinkToLineResponse {
            permalink: permalink.to_string(),
        })
    }
3045
3046 fn repository_for_request(
3047 this: &Entity<Self>,
3048 id: RepositoryId,
3049 cx: &mut AsyncApp,
3050 ) -> Result<Entity<Repository>> {
3051 this.read_with(cx, |this, _| {
3052 this.repositories
3053 .get(&id)
3054 .context("missing repository handle")
3055 .cloned()
3056 })
3057 }
3058
3059 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3060 self.repositories
3061 .iter()
3062 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3063 .collect()
3064 }
3065
    /// Maps a batch of updated worktree entries to the repositories containing
    /// them, returning (computed on the background executor) the changed paths
    /// grouped per repository as repo-relative paths. Each path is attributed
    /// to at most one repository.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // (work directory abs path, repository) pairs for all known repositories.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize the (sorted) changed paths so they can be compared
        // against repository work directories below.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            // One background task per repository; `FuturesOrdered` preserves
            // the (reversed, i.e. innermost-first) repository order.
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the entry index so duplicates can be
                        // filtered across repositories below.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3145}
3146
3147impl BufferGitState {
    /// Creates an empty per-buffer git state; diff and conflict data are
    /// filled in lazily as they are requested and recalculated.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Holds `true` while a diff recalculation is in flight; see
            // `wait_for_recalculation`.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }
3170
3171 #[ztracing::instrument(skip_all)]
3172 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3173 self.language = buffer.read(cx).language().cloned();
3174 self.language_changed = true;
3175 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3176 }
3177
    /// Re-parses merge conflict markers in `buffer` and updates the associated
    /// `ConflictSet`, when one exists and previously contained conflicts.
    ///
    /// Returns a receiver that fires once the conflict set has been updated.
    /// If no re-parse is scheduled, the sender is dropped immediately, so the
    /// receiver resolves with `Canceled`.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        // Nothing to do if the conflict set was never created or has been dropped.
        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only re-parse when the buffer was previously known to have conflicts.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff against the old snapshot off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake every caller waiting on a conflict re-parse.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3229
3230 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3231 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3232 }
3233
3234 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3235 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3236 }
3237
3238 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3239 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3240 }
3241
3242 fn handle_base_texts_updated(
3243 &mut self,
3244 buffer: text::BufferSnapshot,
3245 message: proto::UpdateDiffBases,
3246 cx: &mut Context<Self>,
3247 ) {
3248 use proto::update_diff_bases::Mode;
3249
3250 let Some(mode) = Mode::from_i32(message.mode) else {
3251 return;
3252 };
3253
3254 let diff_bases_change = match mode {
3255 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3256 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3257 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3258 Mode::IndexAndHead => DiffBasesChange::SetEach {
3259 index: message.staged_text,
3260 head: message.committed_text,
3261 },
3262 };
3263
3264 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3265 }
3266
3267 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3268 if *self.recalculating_tx.borrow() {
3269 let mut rx = self.recalculating_tx.subscribe();
3270 Some(async move {
3271 loop {
3272 let is_recalculating = rx.recv().await;
3273 if is_recalculating != Some(true) {
3274 break;
3275 }
3276 }
3277 })
3278 } else {
3279 None
3280 }
3281 }
3282
3283 fn diff_bases_changed(
3284 &mut self,
3285 buffer: text::BufferSnapshot,
3286 diff_bases_change: Option<DiffBasesChange>,
3287 cx: &mut Context<Self>,
3288 ) {
3289 match diff_bases_change {
3290 Some(DiffBasesChange::SetIndex(index)) => {
3291 self.index_text = index.map(|mut index| {
3292 text::LineEnding::normalize(&mut index);
3293 Arc::from(index.as_str())
3294 });
3295 self.index_changed = true;
3296 }
3297 Some(DiffBasesChange::SetHead(head)) => {
3298 self.head_text = head.map(|mut head| {
3299 text::LineEnding::normalize(&mut head);
3300 Arc::from(head.as_str())
3301 });
3302 self.head_changed = true;
3303 }
3304 Some(DiffBasesChange::SetBoth(text)) => {
3305 let text = text.map(|mut text| {
3306 text::LineEnding::normalize(&mut text);
3307 Arc::from(text.as_str())
3308 });
3309 self.head_text = text.clone();
3310 self.index_text = text;
3311 self.head_changed = true;
3312 self.index_changed = true;
3313 }
3314 Some(DiffBasesChange::SetEach { index, head }) => {
3315 self.index_text = index.map(|mut index| {
3316 text::LineEnding::normalize(&mut index);
3317 Arc::from(index.as_str())
3318 });
3319 self.index_changed = true;
3320 self.head_text = head.map(|mut head| {
3321 text::LineEnding::normalize(&mut head);
3322 Arc::from(head.as_str())
3323 });
3324 self.head_changed = true;
3325 }
3326 None => {}
3327 }
3328
3329 self.recalculate_diffs(buffer, cx)
3330 }
3331
3332 #[ztracing::instrument(skip_all)]
3333 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3334 *self.recalculating_tx.borrow_mut() = true;
3335
3336 let language = self.language.clone();
3337 let language_registry = self.language_registry.clone();
3338 let unstaged_diff = self.unstaged_diff();
3339 let uncommitted_diff = self.uncommitted_diff();
3340 let head = self.head_text.clone();
3341 let index = self.index_text.clone();
3342 let index_changed = self.index_changed;
3343 let head_changed = self.head_changed;
3344 let language_changed = self.language_changed;
3345 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3346 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3347 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3348 (None, None) => true,
3349 _ => false,
3350 };
3351
3352 let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
3353 .oid_diffs
3354 .iter()
3355 .filter_map(|(oid, weak)| {
3356 let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
3357 weak.upgrade().map(|diff| (*oid, diff, base_text))
3358 })
3359 .collect();
3360
3361 self.oid_diffs.retain(|oid, weak| {
3362 let alive = weak.upgrade().is_some();
3363 if !alive {
3364 if let Some(oid) = oid {
3365 self.oid_texts.remove(oid);
3366 }
3367 }
3368 alive
3369 });
3370 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3371 log::debug!(
3372 "start recalculating diffs for buffer {}",
3373 buffer.remote_id()
3374 );
3375
3376 let mut new_unstaged_diff = None;
3377 if let Some(unstaged_diff) = &unstaged_diff {
3378 new_unstaged_diff = Some(
3379 cx.update(|cx| {
3380 unstaged_diff.read(cx).update_diff(
3381 buffer.clone(),
3382 index,
3383 index_changed.then_some(false),
3384 language.clone(),
3385 cx,
3386 )
3387 })
3388 .await,
3389 );
3390 }
3391
3392 // Dropping BufferDiff can be expensive, so yield back to the event loop
3393 // for a bit
3394 yield_now().await;
3395
3396 let mut new_uncommitted_diff = None;
3397 if let Some(uncommitted_diff) = &uncommitted_diff {
3398 new_uncommitted_diff = if index_matches_head {
3399 new_unstaged_diff.clone()
3400 } else {
3401 Some(
3402 cx.update(|cx| {
3403 uncommitted_diff.read(cx).update_diff(
3404 buffer.clone(),
3405 head,
3406 head_changed.then_some(true),
3407 language.clone(),
3408 cx,
3409 )
3410 })
3411 .await,
3412 )
3413 }
3414 }
3415
3416 // Dropping BufferDiff can be expensive, so yield back to the event loop
3417 // for a bit
3418 yield_now().await;
3419
3420 let cancel = this.update(cx, |this, _| {
3421 // This checks whether all pending stage/unstage operations
3422 // have quiesced (i.e. both the corresponding write and the
3423 // read of that write have completed). If not, then we cancel
3424 // this recalculation attempt to avoid invalidating pending
3425 // state too quickly; another recalculation will come along
3426 // later and clear the pending state once the state of the index has settled.
3427 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3428 *this.recalculating_tx.borrow_mut() = false;
3429 true
3430 } else {
3431 false
3432 }
3433 })?;
3434 if cancel {
3435 log::debug!(
3436 concat!(
3437 "aborting recalculating diffs for buffer {}",
3438 "due to subsequent hunk operations",
3439 ),
3440 buffer.remote_id()
3441 );
3442 return Ok(());
3443 }
3444
3445 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3446 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3447 {
3448 let task = unstaged_diff.update(cx, |diff, cx| {
3449 // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
3450 // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
3451 // view multibuffers.
3452 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3453 });
3454 Some(task.await)
3455 } else {
3456 None
3457 };
3458
3459 yield_now().await;
3460
3461 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3462 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3463 {
3464 uncommitted_diff
3465 .update(cx, |diff, cx| {
3466 if language_changed {
3467 diff.language_changed(language.clone(), language_registry, cx);
3468 }
3469 diff.set_snapshot_with_secondary(
3470 new_uncommitted_diff,
3471 &buffer,
3472 unstaged_changed_range.flatten(),
3473 true,
3474 cx,
3475 )
3476 })
3477 .await;
3478 }
3479
3480 yield_now().await;
3481
3482 for (oid, oid_diff, base_text) in oid_diffs {
3483 let new_oid_diff = cx
3484 .update(|cx| {
3485 oid_diff.read(cx).update_diff(
3486 buffer.clone(),
3487 base_text,
3488 None,
3489 language.clone(),
3490 cx,
3491 )
3492 })
3493 .await;
3494
3495 oid_diff
3496 .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
3497 .await;
3498
3499 log::debug!(
3500 "finished recalculating oid diff for buffer {} oid {:?}",
3501 buffer.remote_id(),
3502 oid
3503 );
3504
3505 yield_now().await;
3506 }
3507
3508 log::debug!(
3509 "finished recalculating diffs for buffer {}",
3510 buffer.remote_id()
3511 );
3512
3513 if let Some(this) = this.upgrade() {
3514 this.update(cx, |this, _| {
3515 this.index_changed = false;
3516 this.head_changed = false;
3517 this.language_changed = false;
3518 *this.recalculating_tx.borrow_mut() = false;
3519 });
3520 }
3521
3522 Ok(())
3523 }));
3524 }
3525}
3526
3527fn make_remote_delegate(
3528 this: Entity<GitStore>,
3529 project_id: u64,
3530 repository_id: RepositoryId,
3531 askpass_id: u64,
3532 cx: &mut AsyncApp,
3533) -> AskPassDelegate {
3534 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3535 this.update(cx, |this, cx| {
3536 let Some((client, _)) = this.downstream_client() else {
3537 return;
3538 };
3539 let response = client.request(proto::AskPassRequest {
3540 project_id,
3541 repository_id: repository_id.to_proto(),
3542 askpass_id,
3543 prompt,
3544 });
3545 cx.spawn(async move |_, _| {
3546 let mut response = response.await?.response;
3547 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3548 .ok();
3549 response.zeroize();
3550 anyhow::Ok(())
3551 })
3552 .detach_and_log_err(cx);
3553 });
3554 })
3555}
3556
3557impl RepositoryId {
3558 pub fn to_proto(self) -> u64 {
3559 self.0
3560 }
3561
3562 pub fn from_proto(id: u64) -> Self {
3563 RepositoryId(id)
3564 }
3565}
3566
3567impl RepositorySnapshot {
3568 fn empty(
3569 id: RepositoryId,
3570 work_directory_abs_path: Arc<Path>,
3571 original_repo_abs_path: Option<Arc<Path>>,
3572 path_style: PathStyle,
3573 ) -> Self {
3574 Self {
3575 id,
3576 statuses_by_path: Default::default(),
3577 original_repo_abs_path: original_repo_abs_path
3578 .unwrap_or_else(|| work_directory_abs_path.clone()),
3579 work_directory_abs_path,
3580 branch: None,
3581 branch_list: Arc::from([]),
3582 head_commit: None,
3583 scan_id: 0,
3584 merge: Default::default(),
3585 remote_origin_url: None,
3586 remote_upstream_url: None,
3587 stash_entries: Default::default(),
3588 linked_worktrees: Arc::from([]),
3589 path_style,
3590 }
3591 }
3592
    /// Builds the full `UpdateRepository` message sent when a downstream
    /// client first learns about this repository: every status entry is
    /// included and `entry_ids` carries the repository id.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            // Initial update: send every status entry; nothing to remove.
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3634
    /// Builds an incremental `UpdateRepository` message describing the
    /// differences between `old` and `self`.
    ///
    /// Both status lists iterate in path order, so a single merge-style walk
    /// produces the added/changed entries (`updated_statuses`) and the removed
    /// ones (`removed_statuses`).
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Only in the new snapshot: entry was added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // In both snapshots: send only if something changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Only in the old snapshot: entry was removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3715
3716 /// The main worktree is the original checkout that other worktrees were
3717 /// created from.
3718 ///
3719 /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
3720 /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
3721 ///
3722 /// Submodules also return `true` here, since they are not linked worktrees.
3723 pub fn is_main_worktree(&self) -> bool {
3724 self.work_directory_abs_path == self.original_repo_abs_path
3725 }
3726
3727 /// Returns true if this repository is a linked worktree, that is, one that
3728 /// was created from another worktree.
3729 ///
3730 /// Returns `false` for both the main worktree and submodules.
3731 pub fn is_linked_worktree(&self) -> bool {
3732 !self.is_main_worktree()
3733 }
3734
    /// The linked worktrees associated with this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }
3738
    /// Iterates over all recorded status entries, in path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }
3742
    /// Returns the aggregated git status summary over all entries.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }
3746
3747 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3748 self.statuses_by_path
3749 .get(&PathKey(path.as_ref().clone()), ())
3750 .cloned()
3751 }
3752
3753 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
3754 self.statuses_by_path
3755 .get(&PathKey(path.as_ref().clone()), ())
3756 .and_then(|entry| entry.diff_stat)
3757 }
3758
    /// Converts an absolute path into a path relative to this repository's
    /// work directory, or `None` if it lies outside the work directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }
3762
    /// Converts a repo-relative path back into an absolute path under the
    /// work directory.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            // NOTE(review): joining the absolute work directory with a
            // relative repo path is assumed to always succeed — confirm.
            .unwrap()
            .into()
    }
3769
3770 #[inline]
3771 fn abs_path_to_repo_path_inner(
3772 work_directory_abs_path: &Path,
3773 abs_path: &Path,
3774 path_style: PathStyle,
3775 ) -> Option<RepoPath> {
3776 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3777 Some(RepoPath::from_rel_path(&rel_path))
3778 }
3779
    /// Whether `repo_path` was conflicted at the time the merge heads last
    /// changed, even if the conflict has since been resolved.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }
3785
3786 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3787 let had_conflict_on_last_merge_head_change = self
3788 .merge
3789 .merge_heads_by_conflicted_path
3790 .contains_key(repo_path);
3791 let has_conflict_currently = self
3792 .status_for_path(repo_path)
3793 .is_some_and(|entry| entry.status.is_conflicted());
3794 had_conflict_on_last_merge_head_change || has_conflict_currently
3795 }
3796
3797 /// This is the name that will be displayed in the repository selector for this repository.
3798 pub fn display_name(&self) -> SharedString {
3799 self.work_directory_abs_path
3800 .file_name()
3801 .unwrap_or_default()
3802 .to_string_lossy()
3803 .to_string()
3804 .into()
3805 }
3806}
3807
3808pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3809 proto::StashEntry {
3810 oid: entry.oid.as_bytes().to_vec(),
3811 message: entry.message.clone(),
3812 branch: entry.branch.clone(),
3813 index: entry.index as u64,
3814 timestamp: entry.timestamp,
3815 }
3816}
3817
3818pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3819 Ok(StashEntry {
3820 oid: Oid::from_bytes(&entry.oid)?,
3821 message: entry.message.clone(),
3822 index: entry.index as usize,
3823 branch: entry.branch.clone(),
3824 timestamp: entry.timestamp,
3825 })
3826}
3827
3828impl MergeDetails {
3829 async fn update(
3830 &mut self,
3831 backend: &Arc<dyn GitRepository>,
3832 current_conflicted_paths: Vec<RepoPath>,
3833 ) -> Result<bool> {
3834 log::debug!("load merge details");
3835 self.message = backend.merge_message().await.map(SharedString::from);
3836 let heads = backend
3837 .revparse_batch(vec![
3838 "MERGE_HEAD".into(),
3839 "CHERRY_PICK_HEAD".into(),
3840 "REBASE_HEAD".into(),
3841 "REVERT_HEAD".into(),
3842 "APPLY_HEAD".into(),
3843 ])
3844 .await
3845 .log_err()
3846 .unwrap_or_default()
3847 .into_iter()
3848 .map(|opt| opt.map(SharedString::from))
3849 .collect::<Vec<_>>();
3850
3851 let mut conflicts_changed = false;
3852
3853 // Record the merge state for newly conflicted paths
3854 for path in ¤t_conflicted_paths {
3855 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3856 conflicts_changed = true;
3857 self.merge_heads_by_conflicted_path
3858 .insert(path.clone(), heads.clone());
3859 }
3860 }
3861
3862 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3863 self.merge_heads_by_conflicted_path
3864 .retain(|path, old_merge_heads| {
3865 let keep = current_conflicted_paths.contains(path)
3866 || (old_merge_heads == &heads
3867 && old_merge_heads.iter().any(|head| head.is_some()));
3868 if !keep {
3869 conflicts_changed = true;
3870 }
3871 keep
3872 });
3873
3874 Ok(conflicts_changed)
3875 }
3876}
3877
3878impl Repository {
3879 pub fn is_trusted(&self) -> bool {
3880 match self.repository_state.peek() {
3881 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3882 _ => false,
3883 }
3884 }
3885
    /// Returns a clone of the current repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
3889
    /// Iterates over all pending git operations recorded for this repository.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
3893
    /// Returns the aggregate summary over all pending operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
3897
3898 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3899 self.pending_ops
3900 .get(&PathKey(path.as_ref().clone()), ())
3901 .cloned()
3902 }
3903
    /// Constructs a repository backed by a local on-disk git checkout.
    ///
    /// The backend is initialized asynchronously: `repository_state` is a
    /// shared future resolving to the local state (errors are stringified so
    /// the future can be cloned), and a local git worker starts consuming
    /// queued jobs immediately.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data when the repository changes.
        // NOTE(review): the `scan_id > 1` guard appears intended to skip
        // events raised during the initial scan — confirm.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3977
    /// Constructs a repository mirrored from a remote host/collaborator; all
    /// git operations are forwarded over RPC via `client`.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike the local case, the remote state is available immediately.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4015
    /// Upgrades the weak handle to the owning `GitStore`, if it still exists.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4019
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, notifies each buffer's diff state of any changes,
    /// and mirrors those changes to any downstream client.
    ///
    /// Keyed on `GitJobKey::ReloadBufferDiffBases`, so concurrent requests
    /// collapse into a single queued reload.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                // Base texts can only be reloaded from a local git backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1: for each open buffer in this repository, collect
                // its repo path and the currently loaded base texts (only for
                // diffs that still have live holders).
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Phase 2: on the background executor, load fresh base texts
                // from git and compute the minimal change description for
                // each buffer (index only, head only, both, or nothing).
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // Collapse identical texts into a
                                            // single `SetBoth` update.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3: apply the computed changes to each buffer's diff
                // state and forward them to the downstream client, if any.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4179
4180 pub fn send_job<F, Fut, R>(
4181 &mut self,
4182 status: Option<SharedString>,
4183 job: F,
4184 ) -> oneshot::Receiver<R>
4185 where
4186 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
4187 Fut: Future<Output = R> + 'static,
4188 R: Send + 'static,
4189 {
4190 self.send_keyed_job(None, status, job)
4191 }
4192
    /// Enqueues a git job on this repository's serialized job queue.
    ///
    /// `key`, when present, identifies the job so the queue can coalesce
    /// redundant work (e.g. repeated index writes for the same paths).
    /// `status`, when present, is recorded in `active_jobs` (with a start
    /// timestamp) for the duration of the job so callers can display what git
    /// is doing. The returned receiver yields the job's result, or `Canceled`
    /// if the job is dropped before completing.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Monotonic id used to key this job's entry in `active_jobs`.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Publish the status message before the job runs...
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // ...and clear it afterwards; `.ok()` because the
                        // repository entity may already be gone.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4242
4243 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4244 let Some(git_store) = self.git_store.upgrade() else {
4245 return;
4246 };
4247 let entity = cx.entity();
4248 git_store.update(cx, |git_store, cx| {
4249 let Some((&id, _)) = git_store
4250 .repositories
4251 .iter()
4252 .find(|(_, handle)| *handle == &entity)
4253 else {
4254 return;
4255 };
4256 git_store.active_repo_id = Some(id);
4257 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4258 });
4259 }
4260
4261 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4262 self.snapshot.status()
4263 }
4264
4265 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4266 self.snapshot.diff_stat_for_path(path)
4267 }
4268
4269 pub fn cached_stash(&self) -> GitStash {
4270 self.snapshot.stash_entries.clone()
4271 }
4272
4273 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4274 let git_store = self.git_store.upgrade()?;
4275 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4276 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4277 let abs_path = SanitizedPath::new(&abs_path);
4278 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4279 Some(ProjectPath {
4280 worktree_id: worktree.read(cx).id(),
4281 path: relative_path,
4282 })
4283 }
4284
4285 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4286 let git_store = self.git_store.upgrade()?;
4287 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4288 let abs_path = worktree_store.absolutize(path, cx)?;
4289 self.snapshot.abs_path_to_repo_path(&abs_path)
4290 }
4291
4292 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4293 other
4294 .read(cx)
4295 .snapshot
4296 .work_directory_abs_path
4297 .starts_with(&self.snapshot.work_directory_abs_path)
4298 }
4299
    /// Returns the buffer used to edit this repository's commit message,
    /// creating it on first use; subsequent calls return the cached buffer.
    ///
    /// Local repositories create the buffer directly in the `BufferStore`;
    /// remote repositories open it via an `OpenCommitMessageBuffer` RPC and
    /// then wait for the remote buffer to arrive. When `languages` is
    /// provided, the buffer is assigned the "Git Commit" language.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the previously created buffer.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache the buffer so later calls take the fast path.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4352
    /// Creates the commit-message buffer for a local repository: a new buffer
    /// in the `BufferStore`, tagged with the "Git Commit" language when a
    /// registry is available. The buffer is also cached on the repository
    /// entity before being returned.
    fn open_local_commit_buffer(
        language_registry: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        cx.spawn(async move |repository, cx| {
            let git_commit_language = match language_registry {
                Some(language_registry) => {
                    Some(language_registry.language_for_name("Git Commit").await?)
                }
                None => None,
            };
            let buffer = buffer_store
                .update(cx, |buffer_store, cx| {
                    buffer_store.create_buffer(git_commit_language, false, cx)
                })
                .await?;

            repository.update(cx, |repository, _| {
                repository.commit_message_buffer = Some(buffer.clone());
            })?;
            Ok(buffer)
        })
    }
4377
    /// Restores the given paths to their contents at `commit`, tracking the
    /// affected paths as pending "reverted" operations while the job runs.
    /// Local repositories call the backend directly; remote ones forward a
    /// `GitCheckoutFiles` RPC.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4432
    /// Resets the repository to `commit` with the given mode (soft or mixed).
    /// Local repositories call the backend; remote ones forward a `GitReset`
    /// RPC with the mode translated to its proto representation.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4466
    /// Loads the details (sha, message, timestamp, author) of a single
    /// commit, locally or over RPC.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4494
    /// Loads the full diff of a commit: old and new text for each changed
    /// file. Remote repositories fetch the per-file contents via a
    /// `LoadCommitDiff` RPC and convert the proto payload back into
    /// `CommitDiff`.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4530
4531 pub fn file_history(
4532 &mut self,
4533 path: RepoPath,
4534 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4535 self.file_history_paginated(path, 0, None)
4536 }
4537
    /// Loads a page of the commit history touching `path`, skipping `skip`
    /// entries and returning at most `limit` (all remaining when `None`).
    /// Remote repositories go through a `GitFileHistory` RPC and rebuild the
    /// `FileHistory` from the proto response.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4579
4580 pub fn get_graph_data(
4581 &self,
4582 log_source: LogSource,
4583 log_order: LogOrder,
4584 ) -> Option<&InitialGitGraphData> {
4585 self.initial_graph_data.get(&(log_source, log_order))
4586 }
4587
4588 pub fn search_commits(
4589 &mut self,
4590 log_source: LogSource,
4591 search_args: SearchCommitArgs,
4592 request_tx: smol::channel::Sender<Oid>,
4593 cx: &mut Context<Self>,
4594 ) {
4595 let repository_state = self.repository_state.clone();
4596
4597 cx.background_spawn(async move {
4598 let repo_state = repository_state.await;
4599
4600 match repo_state {
4601 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4602 backend
4603 .search_commits(log_source, search_args, request_tx)
4604 .await
4605 .log_err();
4606 }
4607 Ok(RepositoryState::Remote(_)) => {}
4608 Err(_) => {}
4609 };
4610 })
4611 .detach();
4612 }
4613
    /// Returns the slice of loaded graph commits covering `range` for the
    /// given log source/order, starting a background fetch on first access.
    ///
    /// The `(log_source, log_order)` entry is created lazily; its fetch task
    /// streams commits into `commit_data` (via `local_git_graph_data`) and
    /// records any failure in `error`. The returned slice is clamped to the
    /// data loaded so far, and `is_loading` reflects whether the fetch task
    /// is still running.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record failures on the entry so callers can surface
                    // them through `GraphDataResponse::error`.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to what has been loaded so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4683
    /// Streams the initial commit graph for `(log_source, log_order)` from
    /// the local backend into this repository's `initial_graph_data` entry.
    ///
    /// The backend pushes batches of commits through an unbounded channel
    /// from a background task; each batch is appended to `commit_data` (with
    /// each OID indexed in `commit_oid_to_index`) and a `CountUpdated` graph
    /// event is emitted. Returns the backend task's final result.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches until the backend drops its end of the channel.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                // The entry is created before this task is spawned, so a
                // vacant entry indicates the task outlived its data.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4737
    /// Returns the (possibly still-loading) detailed data for commit `sha`,
    /// queuing a request to the background commit-data handler on first
    /// access.
    ///
    /// If the handler is closed it is (re)started; while it is starting, no
    /// request is queued and the reported state is `Loading`, so a later call
    /// retries once the handler is open.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark the commit as loading once the request has
                    // actually been queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4758
    /// Starts the background pipeline that resolves detailed commit data.
    ///
    /// Spawns two tasks: a background task that obtains a
    /// `commit_data_reader` from the local backend and serves OID requests
    /// from `request_rx` (shutting down after 10 seconds of inactivity), and
    /// a foreground task that folds results into `commit_data` and notifies
    /// observers. When the result channel closes or the entity is dropped,
    /// the handler state returns to `Closed` so a later `fetch_commit_data`
    /// call can restart it.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        // Foreground side: receive resolved commit data and store it.
        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Mark the handler closed so it can be restarted on demand.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        // Background side: own the reader and serve OID requests.
        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: tear the handler down after 10s without
                // requests; a fresh timer is armed for every iteration.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task exit and
            // flip the handler state back to `Closed`.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4849
4850 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4851 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4852 }
4853
4854 fn save_buffers<'a>(
4855 &self,
4856 entries: impl IntoIterator<Item = &'a RepoPath>,
4857 cx: &mut Context<Self>,
4858 ) -> Vec<Task<anyhow::Result<()>>> {
4859 let mut save_futures = Vec::new();
4860 if let Some(buffer_store) = self.buffer_store(cx) {
4861 buffer_store.update(cx, |buffer_store, cx| {
4862 for path in entries {
4863 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4864 continue;
4865 };
4866 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4867 && buffer
4868 .read(cx)
4869 .file()
4870 .is_some_and(|file| file.disk_state().exists())
4871 && buffer.read(cx).has_unsaved_edits()
4872 {
4873 save_futures.push(buffer_store.save_buffer(buffer, cx));
4874 }
4875 }
4876 })
4877 }
4878 save_futures
4879 }
4880
4881 pub fn stage_entries(
4882 &mut self,
4883 entries: Vec<RepoPath>,
4884 cx: &mut Context<Self>,
4885 ) -> Task<anyhow::Result<()>> {
4886 self.stage_or_unstage_entries(true, entries, cx)
4887 }
4888
4889 pub fn unstage_entries(
4890 &mut self,
4891 entries: Vec<RepoPath>,
4892 cx: &mut Context<Self>,
4893 ) -> Task<anyhow::Result<()>> {
4894 self.stage_or_unstage_entries(false, entries, cx)
4895 }
4896
    /// Shared implementation of `stage_entries` / `unstage_entries`.
    ///
    /// Saves dirty buffers for the affected paths, optimistically applies the
    /// stage/unstage to all hunks of any open uncommitted diffs (so the UI
    /// updates immediately), then performs the actual index write — locally
    /// or via RPC. On success each diff's hunk-staging operation count is
    /// committed; on failure its pending hunks are cleared. The job is keyed
    /// on the path set so redundant index writes can coalesce.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a b c".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Persist buffer contents before touching the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically stage/unstage all hunks in each
                            // open uncommitted diff, recording each diff's
                            // operation count for reconciliation below.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // The actual index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic updates with the
                            // outcome of the write.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5081
5082 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5083 let snapshot = self.snapshot.clone();
5084 let pending_ops = self.pending_ops.clone();
5085 let to_stage = cx.background_spawn(async move {
5086 snapshot
5087 .status()
5088 .filter_map(|entry| {
5089 if let Some(ops) =
5090 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5091 {
5092 if ops.staging() || ops.staged() {
5093 None
5094 } else {
5095 Some(entry.repo_path)
5096 }
5097 } else if entry.status.staging().is_fully_staged() {
5098 None
5099 } else {
5100 Some(entry.repo_path)
5101 }
5102 })
5103 .collect()
5104 });
5105
5106 cx.spawn(async move |this, cx| {
5107 let to_stage = to_stage.await;
5108 this.update(cx, |this, cx| {
5109 this.stage_or_unstage_entries(true, to_stage, cx)
5110 })?
5111 .await
5112 })
5113 }
5114
5115 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5116 let snapshot = self.snapshot.clone();
5117 let pending_ops = self.pending_ops.clone();
5118 let to_unstage = cx.background_spawn(async move {
5119 snapshot
5120 .status()
5121 .filter_map(|entry| {
5122 if let Some(ops) =
5123 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5124 {
5125 if !ops.staging() && !ops.staged() {
5126 None
5127 } else {
5128 Some(entry.repo_path)
5129 }
5130 } else if entry.status.staging().is_fully_unstaged() {
5131 None
5132 } else {
5133 Some(entry.repo_path)
5134 }
5135 })
5136 .collect()
5137 });
5138
5139 cx.spawn(async move |this, cx| {
5140 let to_unstage = to_unstage.await;
5141 this.update(cx, |this, cx| {
5142 this.stage_or_unstage_entries(false, to_unstage, cx)
5143 })?
5144 .await
5145 })
5146 }
5147
5148 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5149 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5150
5151 self.stash_entries(to_stash, cx)
5152 }
5153
    /// Stashes the given working-copy paths. Local repositories call the
    /// backend; remote ones forward a `Stash` RPC.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` propagates a canceled/dropped job, second the job's
            // own result.
            .await??;
            Ok(())
        })
    }
5190
    /// Pops a stash entry. When `index` is `None`, which entry is popped is
    /// decided by the backend (it receives no index). Remote repositories
    /// forward a `StashPop` RPC.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` propagates a canceled/dropped job, second the job's
            // own result.
            .await??;
            Ok(())
        })
    }
5224
    /// Applies a stash entry without dropping it. When `index` is `None`,
    /// which entry is applied is decided by the backend (it receives no
    /// index). Remote repositories forward a `StashApply` RPC.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` propagates a canceled/dropped job, second the job's
            // own result.
            .await??;
            Ok(())
        })
    }
5258
5259 pub fn stash_drop(
5260 &mut self,
5261 index: Option<usize>,
5262 cx: &mut Context<Self>,
5263 ) -> oneshot::Receiver<anyhow::Result<()>> {
5264 let id = self.id;
5265 let updates_tx = self
5266 .git_store()
5267 .and_then(|git_store| match &git_store.read(cx).state {
5268 GitStoreState::Local { downstream, .. } => downstream
5269 .as_ref()
5270 .map(|downstream| downstream.updates_tx.clone()),
5271 _ => None,
5272 });
5273 let this = cx.weak_entity();
5274 self.send_job(None, move |git_repo, mut cx| async move {
5275 match git_repo {
5276 RepositoryState::Local(LocalRepositoryState {
5277 backend,
5278 environment,
5279 ..
5280 }) => {
5281 // TODO would be nice to not have to do this manually
5282 let result = backend.stash_drop(index, environment).await;
5283 if result.is_ok()
5284 && let Ok(stash_entries) = backend.stash_entries().await
5285 {
5286 let snapshot = this.update(&mut cx, |this, cx| {
5287 this.snapshot.stash_entries = stash_entries;
5288 cx.emit(RepositoryEvent::StashEntriesChanged);
5289 this.snapshot.clone()
5290 })?;
5291 if let Some(updates_tx) = updates_tx {
5292 updates_tx
5293 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5294 .ok();
5295 }
5296 }
5297
5298 result
5299 }
5300 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5301 client
5302 .request(proto::StashDrop {
5303 project_id: project_id.0,
5304 repository_id: id.to_proto(),
5305 stash_index: index.map(|i| i as u64),
5306 })
5307 .await
5308 .context("sending stash pop request")?;
5309 Ok(())
5310 }
5311 }
5312 })
5313 }
5314
    /// Runs the given git hook for this repository, reported in the job
    /// status as "git hook <name>". Remote repositories forward a
    /// `RunGitHook` RPC.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5341
    /// Commits the currently staged changes with `message`.
    ///
    /// The pre-commit hook is run first and the commit is aborted if it fails.
    /// `name_and_email` optionally overrides the author identity, `options`
    /// carries the amend/signoff flags, and `askpass` is used when git prompts
    /// for credentials.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Enqueue the pre-commit hook now; the commit job below waits for it.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the pre-commit hook failed or its sender was dropped.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate so credential prompts coming
                    // back from the host for this id can be answered; it is
                    // removed again once the request completes.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5396
    /// Fetches from the remote(s) described by `fetch_options`.
    ///
    /// `askpass` supplies credentials when git prompts for them. For remote
    /// projects the request is forwarded to the host and the command output is
    /// relayed back.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the delegate so credential prompts from the host
                    // for this id can be answered; removed when the request ends.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5438
    /// Pushes `branch` to `remote_branch` on `remote`.
    ///
    /// `options` maps to `--set-upstream` / `--force-with-lease`. `askpass` is
    /// used when git prompts for credentials. After a successful local push,
    /// the head branch is re-read and the refreshed snapshot is forwarded to
    /// downstream collaborators (when this project is being shared).
    pub fn push(
        &mut self,
        branch: SharedString,
        remote_branch: SharedString,
        remote: SharedString,
        options: Option<PushOptions>,
        askpass: AskPassDelegate,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Only used for the human-readable job status string below.
        let args = options
            .map(|option| match option {
                PushOptions::SetUpstream => " --set-upstream",
                PushOptions::Force => " --force-with-lease",
            })
            .unwrap_or("");

        // Channel for pushing repository snapshots to downstream collaborators;
        // present only when this is a shared local project.
        let updates_tx = self
            .git_store()
            .and_then(|git_store| match &git_store.read(cx).state {
                GitStoreState::Local { downstream, .. } => downstream
                    .as_ref()
                    .map(|downstream| downstream.updates_tx.clone()),
                _ => None,
            });

        let this = cx.weak_entity();
        self.send_job(
            Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
            move |git_repo, mut cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => {
                        let result = backend
                            .push(
                                branch.to_string(),
                                remote_branch.to_string(),
                                remote.to_string(),
                                options,
                                askpass,
                                environment.clone(),
                                cx.clone(),
                            )
                            .await;
                        // TODO would be nice to not have to do this manually
                        // Re-read the head branch (a push can change upstream
                        // tracking state) and propagate the new snapshot.
                        if result.is_ok() {
                            let branches = backend.branches().await?;
                            let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after scan is {branch:?}");
                            let snapshot = this.update(&mut cx, |this, cx| {
                                this.snapshot.branch = branch;
                                cx.emit(RepositoryEvent::HeadChanged);
                                this.snapshot.clone()
                            })?;
                            if let Some(updates_tx) = updates_tx {
                                updates_tx
                                    .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                                    .ok();
                            }
                        }
                        result
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Route credential prompts from the host back through
                        // our delegate for the duration of the request.
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Push {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_branch_name: remote_branch.to_string(),
                                remote_name: remote.to_string(),
                                options: options.map(|options| match options {
                                    PushOptions::Force => proto::push::PushOptions::Force,
                                    PushOptions::SetUpstream => {
                                        proto::push::PushOptions::SetUpstream
                                    }
                                }
                                    as i32),
                            })
                            .await?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }
5540
    /// Pulls from `remote`, optionally limited to `branch` and optionally
    /// with `--rebase`.
    ///
    /// `askpass` is used when git prompts for credentials. For remote projects
    /// the request is forwarded to the host and the command output relayed back.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build the human-readable job status, mirroring the git invocation:
        // `git pull [--rebase] <remote> [<branch>]`.
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Route credential prompts from the host back through our
                    // delegate for the duration of the request.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5605
    /// Spawns a background job that writes `content` into the index entry for
    /// `path`.
    ///
    /// The job is keyed on `WriteIndex(path)`, so when multiple writes for the
    /// same path are queued only the most recent one runs (earlier ones are
    /// skipped by the worker loop). When `hunk_staging_operation_count` is
    /// provided, it is recorded on the corresponding buffer's diff state after
    /// the write completes.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the file's executable bit in the index;
                        // missing or unreadable files are treated as
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                // Record the hunk-staging operation count that this write
                // corresponds to on the buffer's diff state, so later diff
                // updates can tell whether the index write is stale.
                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5684
5685 pub fn create_remote(
5686 &mut self,
5687 remote_name: String,
5688 remote_url: String,
5689 ) -> oneshot::Receiver<Result<()>> {
5690 let id = self.id;
5691 self.send_job(
5692 Some(format!("git remote add {remote_name} {remote_url}").into()),
5693 move |repo, _cx| async move {
5694 match repo {
5695 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5696 backend.create_remote(remote_name, remote_url).await
5697 }
5698 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5699 client
5700 .request(proto::GitCreateRemote {
5701 project_id: project_id.0,
5702 repository_id: id.to_proto(),
5703 remote_name,
5704 remote_url,
5705 })
5706 .await?;
5707
5708 Ok(())
5709 }
5710 }
5711 },
5712 )
5713 }
5714
5715 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5716 let id = self.id;
5717 self.send_job(
5718 Some(format!("git remove remote {remote_name}").into()),
5719 move |repo, _cx| async move {
5720 match repo {
5721 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5722 backend.remove_remote(remote_name).await
5723 }
5724 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5725 client
5726 .request(proto::GitRemoveRemote {
5727 project_id: project_id.0,
5728 repository_id: id.to_proto(),
5729 remote_name,
5730 })
5731 .await?;
5732
5733 Ok(())
5734 }
5735 }
5736 },
5737 )
5738 }
5739
5740 pub fn get_remotes(
5741 &mut self,
5742 branch_name: Option<String>,
5743 is_push: bool,
5744 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5745 let id = self.id;
5746 self.send_job(None, move |repo, _cx| async move {
5747 match repo {
5748 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5749 let remote = if let Some(branch_name) = branch_name {
5750 if is_push {
5751 backend.get_push_remote(branch_name).await?
5752 } else {
5753 backend.get_branch_remote(branch_name).await?
5754 }
5755 } else {
5756 None
5757 };
5758
5759 match remote {
5760 Some(remote) => Ok(vec![remote]),
5761 None => backend.get_all_remotes().await,
5762 }
5763 }
5764 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5765 let response = client
5766 .request(proto::GetRemotes {
5767 project_id: project_id.0,
5768 repository_id: id.to_proto(),
5769 branch_name,
5770 is_push,
5771 })
5772 .await?;
5773
5774 let remotes = response
5775 .remotes
5776 .into_iter()
5777 .map(|remotes| Remote {
5778 name: remotes.name.into(),
5779 })
5780 .collect();
5781
5782 Ok(remotes)
5783 }
5784 }
5785 })
5786 }
5787
5788 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5789 let id = self.id;
5790 self.send_job(None, move |repo, _| async move {
5791 match repo {
5792 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5793 backend.branches().await
5794 }
5795 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5796 let response = client
5797 .request(proto::GitGetBranches {
5798 project_id: project_id.0,
5799 repository_id: id.to_proto(),
5800 })
5801 .await?;
5802
5803 let branches = response
5804 .branches
5805 .into_iter()
5806 .map(|branch| proto_to_branch(&branch))
5807 .collect();
5808
5809 Ok(branches)
5810 }
5811 }
5812 })
5813 }
5814
5815 /// If this is a linked worktree (*NOT* the main checkout of a repository),
5816 /// returns the pathed for the linked worktree.
5817 ///
5818 /// Returns None if this is the main checkout.
5819 pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
5820 if self.work_directory_abs_path != self.original_repo_abs_path {
5821 Some(&self.work_directory_abs_path)
5822 } else {
5823 None
5824 }
5825 }
5826
5827 pub fn path_for_new_linked_worktree(
5828 &self,
5829 branch_name: &str,
5830 worktree_directory_setting: &str,
5831 ) -> Result<PathBuf> {
5832 let original_repo = self.original_repo_abs_path.clone();
5833 let project_name = original_repo
5834 .file_name()
5835 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5836 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5837 Ok(directory.join(branch_name).join(project_name))
5838 }
5839
5840 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5841 let id = self.id;
5842 self.send_job(None, move |repo, _| async move {
5843 match repo {
5844 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5845 backend.worktrees().await
5846 }
5847 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5848 let response = client
5849 .request(proto::GitGetWorktrees {
5850 project_id: project_id.0,
5851 repository_id: id.to_proto(),
5852 })
5853 .await?;
5854
5855 let worktrees = response
5856 .worktrees
5857 .into_iter()
5858 .map(|worktree| proto_to_worktree(&worktree))
5859 .collect();
5860
5861 Ok(worktrees)
5862 }
5863 }
5864 })
5865 }
5866
5867 pub fn create_worktree(
5868 &mut self,
5869 branch_name: String,
5870 path: PathBuf,
5871 commit: Option<String>,
5872 ) -> oneshot::Receiver<Result<()>> {
5873 let id = self.id;
5874 self.send_job(
5875 Some(format!("git worktree add: {}", branch_name).into()),
5876 move |repo, _cx| async move {
5877 match repo {
5878 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5879 backend.create_worktree(branch_name, path, commit).await
5880 }
5881 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5882 client
5883 .request(proto::GitCreateWorktree {
5884 project_id: project_id.0,
5885 repository_id: id.to_proto(),
5886 name: branch_name,
5887 directory: path.to_string_lossy().to_string(),
5888 commit,
5889 })
5890 .await?;
5891
5892 Ok(())
5893 }
5894 }
5895 },
5896 )
5897 }
5898
5899 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5900 let id = self.id;
5901 self.send_job(
5902 Some(format!("git worktree remove: {}", path.display()).into()),
5903 move |repo, _cx| async move {
5904 match repo {
5905 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5906 backend.remove_worktree(path, force).await
5907 }
5908 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5909 client
5910 .request(proto::GitRemoveWorktree {
5911 project_id: project_id.0,
5912 repository_id: id.to_proto(),
5913 path: path.to_string_lossy().to_string(),
5914 force,
5915 })
5916 .await?;
5917
5918 Ok(())
5919 }
5920 }
5921 },
5922 )
5923 }
5924
5925 pub fn rename_worktree(
5926 &mut self,
5927 old_path: PathBuf,
5928 new_path: PathBuf,
5929 ) -> oneshot::Receiver<Result<()>> {
5930 let id = self.id;
5931 self.send_job(
5932 Some(format!("git worktree move: {}", old_path.display()).into()),
5933 move |repo, _cx| async move {
5934 match repo {
5935 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5936 backend.rename_worktree(old_path, new_path).await
5937 }
5938 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5939 client
5940 .request(proto::GitRenameWorktree {
5941 project_id: project_id.0,
5942 repository_id: id.to_proto(),
5943 old_path: old_path.to_string_lossy().to_string(),
5944 new_path: new_path.to_string_lossy().to_string(),
5945 })
5946 .await?;
5947
5948 Ok(())
5949 }
5950 }
5951 },
5952 )
5953 }
5954
5955 pub fn default_branch(
5956 &mut self,
5957 include_remote_name: bool,
5958 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5959 let id = self.id;
5960 self.send_job(None, move |repo, _| async move {
5961 match repo {
5962 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5963 backend.default_branch(include_remote_name).await
5964 }
5965 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5966 let response = client
5967 .request(proto::GetDefaultBranch {
5968 project_id: project_id.0,
5969 repository_id: id.to_proto(),
5970 })
5971 .await?;
5972
5973 anyhow::Ok(response.branch.map(SharedString::from))
5974 }
5975 }
5976 })
5977 }
5978
    /// Computes a tree-level diff (per-path added/modified/deleted statuses)
    /// for the given `diff_type`.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Convert proto entries into TreeDiff statuses. Entries with
                    // a missing/unparsable oid or path are logged and dropped
                    // (via `log_err`) rather than failing the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6038
    /// Produces a textual diff for the given `diff_type` (HEAD vs. index,
    /// HEAD vs. worktree, or against a merge base).
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Translate to the proto enum; only MergeBase carries an
                    // extra base ref.
                    let (proto_diff_type, merge_base_ref) = match &diff_type {
                        DiffType::HeadToIndex => {
                            (proto::git_diff::DiffType::HeadToIndex.into(), None)
                        }
                        DiffType::HeadToWorktree => {
                            (proto::git_diff::DiffType::HeadToWorktree.into(), None)
                        }
                        DiffType::MergeBase { base_ref } => (
                            proto::git_diff::DiffType::MergeBase.into(),
                            Some(base_ref.to_string()),
                        ),
                    };
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            diff_type: proto_diff_type,
                            merge_base_ref,
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
6073
    /// Creates branch `branch_name` and switches to it, optionally starting
    /// from `base_branch` (mirrors `git switch -c`).
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `base_branch` is not forwarded here, so for
                    // remote projects the branch is created without the chosen
                    // base — confirm whether proto::GitCreateBranch should
                    // carry it.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6104
6105 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6106 let id = self.id;
6107 self.send_job(
6108 Some(format!("git switch {branch_name}").into()),
6109 move |repo, _cx| async move {
6110 match repo {
6111 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6112 backend.change_branch(branch_name).await
6113 }
6114 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6115 client
6116 .request(proto::GitChangeBranch {
6117 project_id: project_id.0,
6118 repository_id: id.to_proto(),
6119 branch_name,
6120 })
6121 .await?;
6122
6123 Ok(())
6124 }
6125 }
6126 },
6127 )
6128 }
6129
6130 pub fn delete_branch(
6131 &mut self,
6132 is_remote: bool,
6133 branch_name: String,
6134 ) -> oneshot::Receiver<Result<()>> {
6135 let id = self.id;
6136 self.send_job(
6137 Some(
6138 format!(
6139 "git branch {} {}",
6140 if is_remote { "-dr" } else { "-d" },
6141 branch_name
6142 )
6143 .into(),
6144 ),
6145 move |repo, _cx| async move {
6146 match repo {
6147 RepositoryState::Local(state) => {
6148 state.backend.delete_branch(is_remote, branch_name).await
6149 }
6150 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6151 client
6152 .request(proto::GitDeleteBranch {
6153 project_id: project_id.0,
6154 repository_id: id.to_proto(),
6155 is_remote,
6156 branch_name,
6157 })
6158 .await?;
6159
6160 Ok(())
6161 }
6162 }
6163 },
6164 )
6165 }
6166
6167 pub fn rename_branch(
6168 &mut self,
6169 branch: String,
6170 new_name: String,
6171 ) -> oneshot::Receiver<Result<()>> {
6172 let id = self.id;
6173 self.send_job(
6174 Some(format!("git branch -m {branch} {new_name}").into()),
6175 move |repo, _cx| async move {
6176 match repo {
6177 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6178 backend.rename_branch(branch, new_name).await
6179 }
6180 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6181 client
6182 .request(proto::GitRenameBranch {
6183 project_id: project_id.0,
6184 repository_id: id.to_proto(),
6185 branch,
6186 new_name,
6187 })
6188 .await?;
6189
6190 Ok(())
6191 }
6192 }
6193 },
6194 )
6195 }
6196
6197 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6198 let id = self.id;
6199 self.send_job(None, move |repo, _cx| async move {
6200 match repo {
6201 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6202 backend.check_for_pushed_commit().await
6203 }
6204 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6205 let response = client
6206 .request(proto::CheckForPushedCommits {
6207 project_id: project_id.0,
6208 repository_id: id.to_proto(),
6209 })
6210 .await?;
6211
6212 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6213
6214 Ok(branches)
6215 }
6216 }
6217 })
6218 }
6219
6220 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6221 self.send_job(None, |repo, _cx| async move {
6222 match repo {
6223 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6224 backend.checkpoint().await
6225 }
6226 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6227 }
6228 })
6229 }
6230
6231 pub fn restore_checkpoint(
6232 &mut self,
6233 checkpoint: GitRepositoryCheckpoint,
6234 ) -> oneshot::Receiver<Result<()>> {
6235 self.send_job(None, move |repo, _cx| async move {
6236 match repo {
6237 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6238 backend.restore_checkpoint(checkpoint).await
6239 }
6240 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6241 }
6242 })
6243 }
6244
    /// Applies a repository update received from the upstream (host) project,
    /// mutating the local snapshot in place and emitting an event for each
    /// portion of the state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch / head commit: emit HeadChanged only when either differs.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);

        // Stash entries: unparsable proto entries are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;

        // Linked worktrees and remote URLs.
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // File statuses arrive as a removal list plus an upsert list; both are
        // applied as a single batch of sum-tree edits. Malformed paths/statuses
        // are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only advance the scan id once the final message of a batch arrives.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6328
6329 pub fn compare_checkpoints(
6330 &mut self,
6331 left: GitRepositoryCheckpoint,
6332 right: GitRepositoryCheckpoint,
6333 ) -> oneshot::Receiver<Result<bool>> {
6334 self.send_job(None, move |repo, _cx| async move {
6335 match repo {
6336 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6337 backend.compare_checkpoints(left, right).await
6338 }
6339 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6340 }
6341 })
6342 }
6343
6344 pub fn diff_checkpoints(
6345 &mut self,
6346 base_checkpoint: GitRepositoryCheckpoint,
6347 target_checkpoint: GitRepositoryCheckpoint,
6348 ) -> oneshot::Receiver<Result<String>> {
6349 self.send_job(None, move |repo, _cx| async move {
6350 match repo {
6351 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6352 backend
6353 .diff_checkpoints(base_checkpoint, target_checkpoint)
6354 .await
6355 }
6356 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6357 }
6358 })
6359 }
6360
    /// Prunes finished pending operations, keeping only entries that still have
    /// at least one running op, and emits `PendingOpsChanged` if anything was
    /// pruned.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree from entries that still contain a running op;
        // entries whose ops have all finished are dropped entirely.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *previous* pending ops (the
            // assignment below happens after the emit) — confirm this is
            // intentional rather than meaning to send `updated`.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6386
    /// Schedules a full git status re-scan as a keyed background job.
    ///
    /// The job uses the `ReloadGitState` key, so when several scans are queued
    /// only the most recently scheduled one actually runs (earlier ones are
    /// skipped by the worker loop). The freshly computed snapshot is forwarded
    /// to downstream collaborators via `updates_tx` when present.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been dropped while the job was
                // queued; nothing to update in that case.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans require direct backend access.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6418
    /// Spawns the background task that executes git jobs for a local
    /// repository, returning the channel used to enqueue jobs.
    ///
    /// Jobs run one at a time, in FIFO order, with one exception: before a job
    /// runs, all already-arrived jobs are drained into the queue, and if the
    /// popped job's key also appears on a later queued job, the popped job is
    /// skipped — later keyed jobs supersede earlier ones with the same key.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            // Register additional git hosting providers discovered from this
            // repository's backend, if a global registry exists.
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything that's already arrived so keyed-job
                // deduplication can see the whole queue.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job if a later queued job carries the same key.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6464
    /// Spawns the background task that executes git jobs for a remote
    /// repository, returning the channel used to enqueue jobs.
    ///
    /// Same queueing/dedup behavior as `spawn_local_git_worker` (FIFO, with a
    /// popped job skipped when a later queued job shares its key), but without
    /// the backend initialization and hosting-provider registration steps.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain all already-arrived jobs so dedup can see the queue.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job if a later queued job carries the same key.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6500
6501 fn load_staged_text(
6502 &mut self,
6503 buffer_id: BufferId,
6504 repo_path: RepoPath,
6505 cx: &App,
6506 ) -> Task<Result<Option<String>>> {
6507 let rx = self.send_job(None, move |state, _| async move {
6508 match state {
6509 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6510 anyhow::Ok(backend.load_index_text(repo_path).await)
6511 }
6512 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6513 let response = client
6514 .request(proto::OpenUnstagedDiff {
6515 project_id: project_id.to_proto(),
6516 buffer_id: buffer_id.to_proto(),
6517 })
6518 .await?;
6519 Ok(response.staged_text)
6520 }
6521 }
6522 });
6523 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6524 }
6525
    /// Loads both diff bases (HEAD text and index text) for `repo_path`,
    /// returning how the buffer's diff bases should change.
    ///
    /// Locally both texts are read from the backend, and a single shared base
    /// is used when they agree. Remotely, the host makes that distinction and
    /// reports it via the response `mode`.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // When index and HEAD agree, set a single shared diff
                    // base; otherwise each base is set independently.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6571
6572 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6573 let repository_id = self.snapshot.id;
6574 let rx = self.send_job(None, move |state, _| async move {
6575 match state {
6576 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6577 backend.load_blob_content(oid).await
6578 }
6579 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6580 let response = client
6581 .request(proto::GetBlobContent {
6582 project_id: project_id.to_proto(),
6583 repository_id: repository_id.0,
6584 oid: oid.to_string(),
6585 })
6586 .await?;
6587 Ok(response.content)
6588 }
6589 }
6590 });
6591 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6592 }
6593
    /// Records `paths` as needing a git status refresh and enqueues a keyed
    /// job to recompute their statuses and diff stats.
    ///
    /// The job is keyed with `GitJobKey::RefreshStatuses`, so bursts of path
    /// changes coalesce into a single refresh. Status/diff-stat changes are
    /// applied to the snapshot, change events are emitted, and the updated
    /// snapshot is forwarded downstream when `updates_tx` is provided.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take the accumulated path batches; a coalesced later job
                // would see (and clear) the same accumulated set.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        // Diff stats only make sense when a HEAD commit
                        // exists; otherwise substitute an empty result.
                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Emit an edit only for entries whose status or diff
                        // stat actually changed since the previous snapshot.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Any requested path with no reported status is now
                        // clean/absent; drop its stale entry if one exists.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6706
6707 /// currently running git command and when it started
6708 pub fn current_job(&self) -> Option<JobInfo> {
6709 self.active_jobs.values().next().cloned()
6710 }
6711
    /// Enqueues a no-op job and returns a receiver that resolves once that
    /// job has run — i.e. once every job enqueued before this call has
    /// completed. Useful for awaiting quiescence of the git worker.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
6715
    /// Runs `f` while tracking it as a pending git operation on each of
    /// `paths`.
    ///
    /// A [`PendingOp`] with status `Running` is recorded for every path
    /// before `f` starts; once `f` completes, each op is marked `Finished`,
    /// `Skipped` (when `f` failed with a cancellation), or `Error`. The
    /// result of `f` is propagated, except that cancellation is reported as
    /// success.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                // A canceled job is not an error; mark it skipped.
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Re-read each path's op list (it may have gained newer ops in
            // the meantime) and update the status of the op created above.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6755
    /// Records a new `Running` pending op with the given git status for each
    /// path, returning the `(id, path)` pair of every op created so the
    /// caller can update their statuses once the underlying job completes.
    fn new_pending_ops_for_paths(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
    ) -> Vec<(PendingOpId, RepoPath)> {
        let mut edits = Vec::with_capacity(paths.len());
        let mut ids = Vec::with_capacity(paths.len());
        for path in paths {
            // Append to the path's existing op list, or start a fresh one.
            let mut ops = self
                .pending_ops
                .get(&PathKey(path.as_ref().clone()), ())
                .cloned()
                .unwrap_or_else(|| PendingOps::new(&path));
            let id = ops.max_id() + 1;
            ops.ops.push(PendingOp {
                id,
                git_status,
                job_status: pending_op::JobStatus::Running,
            });
            edits.push(sum_tree::Edit::Insert(ops));
            ids.push((id, path));
        }
        self.pending_ops.edit(edits, ());
        ids
    }
6781 pub fn default_remote_url(&self) -> Option<String> {
6782 self.remote_upstream_url
6783 .clone()
6784 .or(self.remote_origin_url.clone())
6785 }
6786}
6787
6788/// If `path` is a git linked worktree checkout, resolves it to the main
6789/// repository's working directory path. Returns `None` if `path` is a normal
6790/// repository, not a git repo, or if resolution fails.
6791///
6792/// Resolution works by:
6793/// 1. Reading the `.git` file to get the `gitdir:` pointer
6794/// 2. Following that to the worktree-specific git directory
6795/// 3. Reading the `commondir` file to find the shared `.git` directory
6796/// 4. Deriving the main repo's working directory from the common dir
6797pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
6798 let dot_git = path.join(".git");
6799 let metadata = fs.metadata(&dot_git).await.ok()??;
6800 if metadata.is_dir {
6801 return None; // Normal repo, not a linked worktree
6802 }
6803 // It's a .git file — parse the gitdir: pointer
6804 let content = fs.load(&dot_git).await.ok()?;
6805 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
6806 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
6807 // Read commondir to find the main .git directory
6808 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
6809 let common_dir = fs
6810 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
6811 .await
6812 .ok()?;
6813 Some(git::repository::original_repo_path_from_common_dir(
6814 &common_dir,
6815 ))
6816}
6817
6818/// Validates that the resolved worktree directory is acceptable:
6819/// - The setting must not be an absolute path.
6820/// - The resolved path must be either a subdirectory of the working
6821/// directory or a subdirectory of its parent (i.e., a sibling).
6822///
6823/// Returns `Ok(resolved_path)` or an error with a user-facing message.
6824pub fn worktrees_directory_for_repo(
6825 original_repo_abs_path: &Path,
6826 worktree_directory_setting: &str,
6827) -> Result<PathBuf> {
6828 // Check the original setting before trimming, since a path like "///"
6829 // is absolute but becomes "" after stripping trailing separators.
6830 // Also check for leading `/` or `\` explicitly, because on Windows
6831 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
6832 // would slip through even though it's clearly not a relative path.
6833 if Path::new(worktree_directory_setting).is_absolute()
6834 || worktree_directory_setting.starts_with('/')
6835 || worktree_directory_setting.starts_with('\\')
6836 {
6837 anyhow::bail!(
6838 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
6839 );
6840 }
6841
6842 if worktree_directory_setting.is_empty() {
6843 anyhow::bail!("git.worktree_directory must not be empty");
6844 }
6845
6846 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
6847 if trimmed == ".." {
6848 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
6849 }
6850
6851 let joined = original_repo_abs_path.join(trimmed);
6852 let resolved = util::normalize_path(&joined);
6853 let resolved = if resolved.starts_with(original_repo_abs_path) {
6854 resolved
6855 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
6856 resolved.join(repo_dir_name)
6857 } else {
6858 resolved
6859 };
6860
6861 let parent = original_repo_abs_path
6862 .parent()
6863 .unwrap_or(original_repo_abs_path);
6864
6865 if !resolved.starts_with(parent) {
6866 anyhow::bail!(
6867 "git.worktree_directory resolved to {resolved:?}, which is outside \
6868 the project root and its parent directory. It must resolve to a \
6869 subdirectory of {original_repo_abs_path:?} or a sibling of it."
6870 );
6871 }
6872
6873 Ok(resolved)
6874}
6875
6876/// Returns a short name for a linked worktree suitable for UI display
6877///
6878/// Uses the main worktree path to come up with a short name that disambiguates
6879/// the linked worktree from the main worktree.
6880pub fn linked_worktree_short_name(
6881 main_worktree_path: &Path,
6882 linked_worktree_path: &Path,
6883) -> Option<SharedString> {
6884 if main_worktree_path == linked_worktree_path {
6885 return None;
6886 }
6887
6888 let project_name = main_worktree_path.file_name()?.to_str()?;
6889 let directory_name = linked_worktree_path.file_name()?.to_str()?;
6890 let name = if directory_name != project_name {
6891 directory_name.to_string()
6892 } else {
6893 linked_worktree_path
6894 .parent()?
6895 .file_name()?
6896 .to_str()?
6897 .to_string()
6898 };
6899 Some(name.into())
6900}
6901
/// Builds a permalink URL for a file inside the Rust registry source cache
/// (`~/.cargo/registry/src/...`).
///
/// Published crates ship a `.cargo_vcs_info.json` recording the commit they
/// were packaged from, and a `Cargo.toml` with a `package.repository` URL.
/// These are combined to produce a permalink to `path` at the packaged
/// commit, with `selection` as the highlighted line range.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal mirror of `.cargo_vcs_info.json`'s schema.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    // Minimal mirror of the crate's `Cargo.toml` manifest.
    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to find the crate root — the directory that
    // contains `.cargo_vcs_info.json`.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file path at the crate's location within the repository.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
6952
6953fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6954 let Some(blame) = blame else {
6955 return proto::BlameBufferResponse {
6956 blame_response: None,
6957 };
6958 };
6959
6960 let entries = blame
6961 .entries
6962 .into_iter()
6963 .map(|entry| proto::BlameEntry {
6964 sha: entry.sha.as_bytes().into(),
6965 start_line: entry.range.start,
6966 end_line: entry.range.end,
6967 original_line_number: entry.original_line_number,
6968 author: entry.author,
6969 author_mail: entry.author_mail,
6970 author_time: entry.author_time,
6971 author_tz: entry.author_tz,
6972 committer: entry.committer_name,
6973 committer_mail: entry.committer_email,
6974 committer_time: entry.committer_time,
6975 committer_tz: entry.committer_tz,
6976 summary: entry.summary,
6977 previous: entry.previous,
6978 filename: entry.filename,
6979 })
6980 .collect::<Vec<_>>();
6981
6982 let messages = blame
6983 .messages
6984 .into_iter()
6985 .map(|(oid, message)| proto::CommitMessage {
6986 oid: oid.as_bytes().into(),
6987 message,
6988 })
6989 .collect::<Vec<_>>();
6990
6991 proto::BlameBufferResponse {
6992 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6993 }
6994}
6995
6996fn deserialize_blame_buffer_response(
6997 response: proto::BlameBufferResponse,
6998) -> Option<git::blame::Blame> {
6999 let response = response.blame_response?;
7000 let entries = response
7001 .entries
7002 .into_iter()
7003 .filter_map(|entry| {
7004 Some(git::blame::BlameEntry {
7005 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7006 range: entry.start_line..entry.end_line,
7007 original_line_number: entry.original_line_number,
7008 committer_name: entry.committer,
7009 committer_time: entry.committer_time,
7010 committer_tz: entry.committer_tz,
7011 committer_email: entry.committer_mail,
7012 author: entry.author,
7013 author_mail: entry.author_mail,
7014 author_time: entry.author_time,
7015 author_tz: entry.author_tz,
7016 summary: entry.summary,
7017 previous: entry.previous,
7018 filename: entry.filename,
7019 })
7020 })
7021 .collect::<Vec<_>>();
7022
7023 let messages = response
7024 .messages
7025 .into_iter()
7026 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7027 .collect::<HashMap<_, _>>();
7028
7029 Some(Blame { entries, messages })
7030}
7031
7032fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7033 proto::Branch {
7034 is_head: branch.is_head,
7035 ref_name: branch.ref_name.to_string(),
7036 unix_timestamp: branch
7037 .most_recent_commit
7038 .as_ref()
7039 .map(|commit| commit.commit_timestamp as u64),
7040 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7041 ref_name: upstream.ref_name.to_string(),
7042 tracking: upstream
7043 .tracking
7044 .status()
7045 .map(|upstream| proto::UpstreamTracking {
7046 ahead: upstream.ahead as u64,
7047 behind: upstream.behind as u64,
7048 }),
7049 }),
7050 most_recent_commit: branch
7051 .most_recent_commit
7052 .as_ref()
7053 .map(|commit| proto::CommitSummary {
7054 sha: commit.sha.to_string(),
7055 subject: commit.subject.to_string(),
7056 commit_timestamp: commit.commit_timestamp,
7057 author_name: commit.author_name.to_string(),
7058 }),
7059 }
7060}
7061
7062fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7063 proto::Worktree {
7064 path: worktree.path.to_string_lossy().to_string(),
7065 ref_name: worktree
7066 .ref_name
7067 .as_ref()
7068 .map(|s| s.to_string())
7069 .unwrap_or_default(),
7070 sha: worktree.sha.to_string(),
7071 is_main: worktree.is_main,
7072 }
7073}
7074
7075fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7076 git::repository::Worktree {
7077 path: PathBuf::from(proto.path.clone()),
7078 ref_name: Some(SharedString::from(&proto.ref_name)),
7079 sha: proto.sha.clone().into(),
7080 is_main: proto.is_main,
7081 }
7082}
7083
7084fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7085 git::repository::Branch {
7086 is_head: proto.is_head,
7087 ref_name: proto.ref_name.clone().into(),
7088 upstream: proto
7089 .upstream
7090 .as_ref()
7091 .map(|upstream| git::repository::Upstream {
7092 ref_name: upstream.ref_name.to_string().into(),
7093 tracking: upstream
7094 .tracking
7095 .as_ref()
7096 .map(|tracking| {
7097 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7098 ahead: tracking.ahead as u32,
7099 behind: tracking.behind as u32,
7100 })
7101 })
7102 .unwrap_or(git::repository::UpstreamTracking::Gone),
7103 }),
7104 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7105 git::repository::CommitSummary {
7106 sha: commit.sha.to_string().into(),
7107 subject: commit.subject.to_string().into(),
7108 commit_timestamp: commit.commit_timestamp,
7109 author_name: commit.author_name.to_string().into(),
7110 has_parent: true,
7111 }
7112 }),
7113 }
7114}
7115
7116fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7117 proto::GitCommitDetails {
7118 sha: commit.sha.to_string(),
7119 message: commit.message.to_string(),
7120 commit_timestamp: commit.commit_timestamp,
7121 author_email: commit.author_email.to_string(),
7122 author_name: commit.author_name.to_string(),
7123 }
7124}
7125
7126fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7127 CommitDetails {
7128 sha: proto.sha.clone().into(),
7129 message: proto.message.clone().into(),
7130 commit_timestamp: proto.commit_timestamp,
7131 author_email: proto.author_email.clone().into(),
7132 author_name: proto.author_name.clone().into(),
7133 }
7134}
7135
/// Recomputes the full repository snapshot, doing snapshot bookkeeping on the
/// foreground thread while running the git commands on the background thread.
/// Branch, head, remotes, and worktrees are updated first so the UI can react
/// sooner; file statuses, diff stats, merge state, and stash entries are
/// computed and published immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // A full recompute supersedes any pending incremental status refreshes.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD to full commit details; an unborn branch yields `None`.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // Only worktrees other than this repository's own working directory
    // count as "linked".
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First snapshot update: branch/head/remote/worktree state, published
    // before the slower file-state computation below.
    let snapshot = this.update(cx, |this, cx| {
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Gather full file state: statuses for the whole tree, diff stats (only
    // meaningful when a HEAD commit exists), and stash entries.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Build the status tree, collecting conflicted paths along the way for
    // the merge-state update below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Refresh merge/conflict bookkeeping off the main thread.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Second snapshot update: file statuses, merge state, and stash entries.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7305
/// Deserializes a git file status from the wire format.
///
/// Prefers the structured `status` variant; when it is absent, falls back to
/// interpreting the legacy `simple_status` code for backwards compatibility.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        // Legacy path: map the coarse status code onto an equivalent full
        // status (with an unmodified index for tracked statuses).
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both heads of the merge conflict; each must be one of
            // the unmerged status codes.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Decode the index and worktree halves of a tracked status.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7391
7392fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7393 use proto::git_file_status::{Tracked, Unmerged, Variant};
7394
7395 let variant = match status {
7396 FileStatus::Untracked => Variant::Untracked(Default::default()),
7397 FileStatus::Ignored => Variant::Ignored(Default::default()),
7398 FileStatus::Unmerged(UnmergedStatus {
7399 first_head,
7400 second_head,
7401 }) => Variant::Unmerged(Unmerged {
7402 first_head: unmerged_status_to_proto(first_head),
7403 second_head: unmerged_status_to_proto(second_head),
7404 }),
7405 FileStatus::Tracked(TrackedStatus {
7406 index_status,
7407 worktree_status,
7408 }) => Variant::Tracked(Tracked {
7409 index_status: tracked_status_to_proto(index_status),
7410 worktree_status: tracked_status_to_proto(worktree_status),
7411 }),
7412 };
7413 proto::GitFileStatus {
7414 variant: Some(variant),
7415 }
7416}
7417
7418fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7419 match code {
7420 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7421 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7422 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7423 }
7424}
7425
7426fn tracked_status_to_proto(code: StatusCode) -> i32 {
7427 match code {
7428 StatusCode::Added => proto::GitStatus::Added as _,
7429 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7430 StatusCode::Modified => proto::GitStatus::Modified as _,
7431 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7432 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7433 StatusCode::Copied => proto::GitStatus::Copied as _,
7434 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7435 }
7436}