1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for all git state in a project: repositories, per-buffer
/// diffs, conflict sets, and replication to/from collaborators. Backed
/// either by the local filesystem or by an upstream peer (see `state`).
pub struct GitStore {
    // Local vs. remote backing, plus any downstream share state.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    // All known repositories, keyed by their stable id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Which worktrees contain paths belonging to each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    // The currently active repository, if one has been selected.
    active_repo_id: Option<RepositoryId>,
    #[allow(clippy::type_complexity)]
    // In-flight diff loads, shared so that concurrent requests for the
    // same (buffer, kind) pair await a single task.
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diffs that have been shared with remote peers, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// The unstaged/uncommitted diffs for one buffer that have been shared
/// with a downstream peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}

/// All git-related state tracked for a single open buffer: its diffs
/// against the index and HEAD, diffs against arbitrary commits, conflict
/// markers, and the cached base texts those diffs are computed from.
struct BufferGitState {
    // Weak handles: the diffs are owned by whoever requested them and can
    // be recreated on demand if dropped.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against a specific commit; a `None` key is a diff with no
    // base commit (see `open_diff_since`).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders notified when a conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in flight.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    // Cached base texts used when recalculating the diffs above.
    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}

/// Describes which diff base texts changed and what they changed to.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    // Only the index (staged) text changed.
    SetIndex(Option<String>),
    // Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    // Index and HEAD changed to distinct values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    // Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}

/// Identifies which kind of diff is being loaded for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    Unstaged,
    Uncommitted,
    SinceOid(Option<git::Oid>),
}
162
/// Backing for a `GitStore`: either direct filesystem access, or a
/// connection to an upstream collaborator/remote host.
enum GitStoreState {
    Local {
        // Source of fresh ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        // Present while this project is shared with downstream peers.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        // Present while this (already remote) project is re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A unit of repository state to be replicated to downstream peers.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Connection to downstream peers for a locally-backed store, plus the
/// background task that streams repository updates to them.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}

/// A checkpoint of every repository in the store, keyed by working
/// directory, suitable for later restoration.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}

/// The git status of one path within a repository, with an optional
/// added/deleted line count.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summarizes this entry for the containing `SumTree`: the path as
    /// the ordering key plus the aggregated git status.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed (and deduplicated) by repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Stable identifier for a repository within a project session.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: which paths conflict (and with which
/// merge heads), plus the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Cache slot for the detailed data of one commit in the commit graph.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}

/// An immutable view of a repository's state, replicated to collaborators
/// and compared against previous snapshots to produce incremental updates.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    // Status entries ordered by path (see the `sum_tree` impls above).
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    // NOTE(review): presumably incremented on each repository rescan —
    // confirm against the scanning code.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    pub linked_worktrees: Arc<[GitWorktree]>,
}

// Identifier for a job queued on a `Repository`.
type JobId = u64;

/// Metadata about an active git job: when it started and a
/// human-readable message describing it.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Background handler that serves requests for per-commit graph data.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph-commit-data handler.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Commit-graph data for one (source, order) pairing, populated
/// incrementally by `fetch_task`.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of loaded commit-graph data returned to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// A single git repository tracked by the `GitStore`. Dereferences to its
/// latest `RepositorySnapshot`.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Channel on which git jobs are submitted for execution.
    job_sender: mpsc::UnboundedSender<GitJob>,
    // Jobs currently running, for progress reporting.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Source of ids for newly started jobs.
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily-resolved local/remote execution state, shared across jobs.
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    // Commit-graph data, keyed by the log query it was fetched for.
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}

impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
358
/// State needed to run git operations against a repository on the local
/// filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    // Environment resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
365
366impl LocalRepositoryState {
367 async fn new(
368 work_directory_abs_path: Arc<Path>,
369 dot_git_abs_path: Arc<Path>,
370 project_environment: WeakEntity<ProjectEnvironment>,
371 fs: Arc<dyn Fs>,
372 is_trusted: bool,
373 cx: &mut AsyncApp,
374 ) -> anyhow::Result<Self> {
375 let environment = project_environment
376 .update(cx, |project_environment, cx| {
377 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
378 })?
379 .await
380 .unwrap_or_else(|| {
381 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
382 HashMap::default()
383 });
384 let search_paths = environment.get("PATH").map(|val| val.to_owned());
385 let backend = cx
386 .background_spawn({
387 let fs = fs.clone();
388 async move {
389 let system_git_binary_path = search_paths
390 .and_then(|search_paths| {
391 which::which_in("git", Some(search_paths), &work_directory_abs_path)
392 .ok()
393 })
394 .or_else(|| which::which("git").ok());
395 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
396 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
397 }
398 })
399 .await?;
400 backend.set_trusted(is_trusted);
401 Ok(LocalRepositoryState {
402 backend,
403 environment: Arc::new(environment),
404 fs,
405 })
406 }
407}
408
/// State needed to proxy git operations through an upstream collaborator.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// How git operations for a repository are executed: directly against the
/// local filesystem, or proxied through a remote peer.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events for an in-flight commit-graph load.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}

/// Events emitted by a `Repository` when parts of its snapshot change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Emitted by a `Repository` when its set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the `GitStore` itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of work to run against a repository's `RepositoryState`.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): appears to identify classes of jobs so duplicates in
    // the queue can be coalesced — confirm with the job-queue logic.
    key: Option<GitJobKey>,
}

/// Kinds of git jobs that can be keyed (see `GitJob::key`).
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
471 pub fn local(
472 worktree_store: &Entity<WorktreeStore>,
473 buffer_store: Entity<BufferStore>,
474 environment: Entity<ProjectEnvironment>,
475 fs: Arc<dyn Fs>,
476 cx: &mut Context<Self>,
477 ) -> Self {
478 Self::new(
479 worktree_store.clone(),
480 buffer_store,
481 GitStoreState::Local {
482 next_repository_id: Arc::new(AtomicU64::new(1)),
483 downstream: None,
484 project_environment: environment,
485 fs,
486 },
487 cx,
488 )
489 }
490
491 pub fn remote(
492 worktree_store: &Entity<WorktreeStore>,
493 buffer_store: Entity<BufferStore>,
494 upstream_client: AnyProtoClient,
495 project_id: u64,
496 cx: &mut Context<Self>,
497 ) -> Self {
498 Self::new(
499 worktree_store.clone(),
500 buffer_store,
501 GitStoreState::Remote {
502 upstream_client,
503 upstream_project_id: project_id,
504 downstream: None,
505 },
506 cx,
507 )
508 }
509
510 fn new(
511 worktree_store: Entity<WorktreeStore>,
512 buffer_store: Entity<BufferStore>,
513 state: GitStoreState,
514 cx: &mut Context<Self>,
515 ) -> Self {
516 let mut _subscriptions = vec![
517 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
518 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
519 ];
520
521 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
522 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
523 }
524
525 GitStore {
526 state,
527 buffer_store,
528 worktree_store,
529 repositories: HashMap::default(),
530 worktree_ids: HashMap::default(),
531 active_repo_id: None,
532 _subscriptions,
533 loading_diffs: HashMap::default(),
534 shared_diffs: HashMap::default(),
535 diffs: HashMap::default(),
536 }
537 }
538
    /// Registers all git-related RPC message handlers on `client`.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
586
    /// Whether this store operates directly on the local filesystem.
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
590 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
591 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
592 let id = repo.read(cx).id;
593 if self.active_repo_id != Some(id) {
594 self.active_repo_id = Some(id);
595 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
596 }
597 }
598 }
599
    /// Begins replicating repository state to a downstream peer.
    ///
    /// For a remote-backed store, current snapshots are forwarded directly.
    /// For a local store, a background task is spawned that diffs each new
    /// snapshot against the last one sent and streams incremental updates.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Queue the current state of every repository as the
                // initial batch of updates.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        // Known repository: send only the
                                        // delta since the last snapshot.
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send everything.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The replication loop ended (peer gone or send
                        // failed): drop the downstream handle.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
680
681 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
682 match &mut self.state {
683 GitStoreState::Local {
684 downstream: downstream_client,
685 ..
686 } => {
687 downstream_client.take();
688 }
689 GitStoreState::Remote {
690 downstream: downstream_client,
691 ..
692 } => {
693 downstream_client.take();
694 }
695 }
696 self.shared_diffs.clear();
697 }
698
    /// Drops all diffs that were shared with the given peer.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
702
703 pub fn active_repository(&self) -> Option<Entity<Repository>> {
704 self.active_repo_id
705 .as_ref()
706 .map(|id| self.repositories[id].clone())
707 }
708
    /// Returns the diff between `buffer` and its index (staged) text,
    /// creating and loading it if it doesn't already exist. Concurrent
    /// calls for the same buffer share a single loading task.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // A live diff already exists; if a recalculation is in flight,
            // wait for it so the caller sees up-to-date hunks.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Join (or create) the shared loading task for this buffer's
        // unstaged diff.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
763
    /// Returns a diff between `buffer` and its content at `oid` (no base
    /// content is loaded when `oid` is `None`), creating it on first
    /// request. The resulting diff uses the buffer's uncommitted diff as
    /// its secondary diff.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Reuse a live diff for this (buffer, oid) pair if one exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Join an in-flight load of the same diff, if any.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the blob content at `oid`, if there is one.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    let uncommitted_diff = this
                        .update(cx, |this, cx| {
                            this.open_uncommitted_diff(buffer.clone(), cx)
                        })?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(uncommitted_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and register the diff so
                        // later requests can reuse them.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
866
    /// Returns the diff between `buffer` and its committed (HEAD) text,
    /// creating and loading it if necessary. Concurrent calls for the
    /// same buffer share a single loading task.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // A live diff already exists; if a recalculation is in flight,
            // wait for it so the caller sees up-to-date hunks.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
919
    /// Shared tail of `open_unstaged_diff`/`open_uncommitted_diff`:
    /// records the loaded base text(s) in the buffer's git state, creates
    /// the `BufferDiff` entity, and waits for its first recalculation.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Loading failed: clear the in-flight entry so a later
                // call can retry, then propagate the error.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff carries the unstaged diff as
                        // its secondary; create one if it doesn't exist.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off the recalculation and hand back a future that
                // resolves once it has completed (if one was started).
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
994
995 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
996 let diff_state = self.diffs.get(&buffer_id)?;
997 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
998 }
999
1000 pub fn get_uncommitted_diff(
1001 &self,
1002 buffer_id: BufferId,
1003 cx: &App,
1004 ) -> Option<Entity<BufferDiff>> {
1005 let diff_state = self.diffs.get(&buffer_id)?;
1006 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1007 }
1008
1009 pub fn get_diff_since_oid(
1010 &self,
1011 buffer_id: BufferId,
1012 oid: Option<git::Oid>,
1013 cx: &App,
1014 ) -> Option<Entity<BufferDiff>> {
1015 let diff_state = self.diffs.get(&buffer_id)?;
1016 diff_state.read(cx).oid_diff(oid)
1017 }
1018
    /// Returns the conflict set for `buffer`, creating one (and kicking
    /// off a conflict-marker parse) if it doesn't already exist.
    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(git_state) = self.diffs.get(&buffer_id)
            && let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            let conflict_set = conflict_set;
            let buffer_snapshot = buffer.read(cx).text_snapshot();

            // Refresh the existing conflict set against the current buffer
            // contents before handing it back.
            git_state.update(cx, |state, cx| {
                let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
            });

            return conflict_set;
        }

        // Seed the "unmerged" flag from the repository status of this path.
        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        // Re-broadcast conflict-set changes as a store-level event.
        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }
1067
1068 pub fn project_path_git_status(
1069 &self,
1070 project_path: &ProjectPath,
1071 cx: &App,
1072 ) -> Option<FileStatus> {
1073 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1074 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1075 }
1076
    /// Creates a checkpoint of every repository in the store.
    ///
    /// Returns a [`GitStoreCheckpoint`] mapping each repository's work
    /// directory path to its individual checkpoint. All per-repository
    /// checkpoint jobs are awaited together on the background executor.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // Flatten the nested result produced by the checkpoint job.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // `zip` relies on the two vectors having been pushed in
                // lockstep in the loop above.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1097
    /// Restores the repositories recorded in `checkpoint`.
    ///
    /// Checkpoints whose work directory no longer corresponds to a tracked
    /// repository are silently skipped; the returned task fails if any
    /// individual restore fails.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index current repositories by work directory so checkpoints can be
        // matched back to them.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // Flatten the nested result from the repository job.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1123
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Every repository checkpoint in `left` must have a counterpart in
    /// `right` (matched by work directory); if one is missing, the
    /// checkpoints are immediately reported unequal. Matching pairs are
    /// compared concurrently and the result is the conjunction of all
    /// comparisons.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                // Comparisons only run for repositories that still exist;
                // vanished repositories are skipped rather than failing.
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    // Flatten the nested result from the repository job.
                    tasks.push(async move { compare.await? });
                }
            } else {
                // `left` has a checkpoint that `right` lacks: not equal.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1162
    /// Blames a buffer.
    ///
    /// When `version` is provided, blames the buffer's content at that
    /// version; otherwise blames the current content. Fails if the buffer
    /// does not belong to any tracked git repository. Local repositories run
    /// the blame through the git backend; remote projects forward a
    /// `BlameBuffer` request to the upstream client.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle across the await so the repository can be
        // dropped while the blame is in flight.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1209
1210 pub fn file_history(
1211 &self,
1212 repo: &Entity<Repository>,
1213 path: RepoPath,
1214 cx: &mut App,
1215 ) -> Task<Result<git::repository::FileHistory>> {
1216 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1217
1218 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1219 }
1220
1221 pub fn file_history_paginated(
1222 &self,
1223 repo: &Entity<Repository>,
1224 path: RepoPath,
1225 skip: usize,
1226 limit: Option<usize>,
1227 cx: &mut App,
1228 ) -> Task<Result<git::repository::FileHistory>> {
1229 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1230
1231 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1232 }
1233
    /// Builds a permalink URL to the given line `selection` of `buffer`.
    ///
    /// For buffers inside a git repository, the permalink is constructed from
    /// the repository's remote URL and current HEAD (locally) or requested
    /// from the upstream client (remote projects). Buffers outside any
    /// repository get a fallback: Rust sources inside the Cargo registry can
    /// be linked via crate metadata.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        let branch = repo.read(cx).branch.clone();
        // Fall back to "origin" when the branch has no upstream remote.
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        // Resolve the hosting provider (GitHub, GitLab, ...)
                        // from the remote URL before building the link.
                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1318
1319 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1320 match &self.state {
1321 GitStoreState::Local {
1322 downstream: downstream_client,
1323 ..
1324 } => downstream_client
1325 .as_ref()
1326 .map(|state| (state.client.clone(), state.project_id)),
1327 GitStoreState::Remote {
1328 downstream: downstream_client,
1329 ..
1330 } => downstream_client.clone(),
1331 }
1332 }
1333
1334 fn upstream_client(&self) -> Option<AnyProtoClient> {
1335 match &self.state {
1336 GitStoreState::Local { .. } => None,
1337 GitStoreState::Remote {
1338 upstream_client, ..
1339 } => Some(upstream_client.clone()),
1340 }
1341 }
1342
    /// Reacts to worktree-store events on local projects.
    ///
    /// * `WorktreeUpdatedEntries` — forwards changed paths to the affected
    ///   repositories.
    /// * `WorktreeUpdatedGitRepositories` — registers/updates repositories and
    ///   reloads buffer diff bases (visible worktrees only).
    /// * `WorktreeRemoved` — drops repositories that no longer belong to any
    ///   worktree and re-selects the active repository if needed.
    ///
    /// No-op for remote projects, whose repositories are driven by upstream
    /// messages instead.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Hidden worktrees are ignored so e.g. internal scratch
                // worktrees don't surface repositories in the UI.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Collect the ids of repositories whose last worktree was
                // just removed; those repositories must be dropped.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Notify downstream collaborators of the removal.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // Pick an arbitrary remaining repository as the new active
                // one, or clear the selection if none remain.
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to a repository's own update event.
    ///
    /// For every open buffer that belongs to this repository, refreshes its
    /// conflict-set state (reparsing markers when the conflict status
    /// changed), then re-emits the event as
    /// [`GitStoreEvent::RepositoryUpdated`].
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only buffers that resolve to this exact repository are updated.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // `conflict_set` is a weak handle; updating it fails
                        // (and we bail via `?` / `.ok()`) if it was dropped.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1488
    /// Re-broadcasts a repository's `JobsUpdated` notification as a
    /// store-level [`GitStoreEvent::JobsUpdated`] event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1492
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
    ///
    /// Matches each incoming update against existing repositories by work
    /// directory. Existing repositories are either moved/rescanned (when the
    /// update carries a new work directory) or unlinked from the worktree
    /// (and removed once no worktree references them). Updates that match no
    /// existing repository and carry a complete set of paths create a new
    /// local [`Repository`]. Downstream collaborators are notified via
    /// `updates_tx` when present.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match against an existing repository by either its old or new
            // work directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repository still exists (possibly relocated):
                    // link it to this worktree and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // No new work directory: this worktree no longer hosts
                    // the repository. Remove it entirely once orphaned.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(_repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A brand-new repository: allocate an id and spin it up.
                let original_repo_abs_path: Arc<Path> =
                    git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Repositories in untrusted worktrees get a restricted backend.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1601
    /// Propagates worktree trust changes to local repository backends.
    ///
    /// When a worktree is trusted or restricted, every repository linked to
    /// that worktree has its backend's trust flag updated asynchronously.
    /// No-op for remote projects.
    fn on_trusted_worktrees_event(
        &mut self,
        _: Entity<TrustedWorktreesStore>,
        event: &TrustedWorktreesEvent,
        cx: &mut Context<Self>,
    ) {
        if !matches!(self.state, GitStoreState::Local { .. }) {
            return;
        }

        let (is_trusted, event_paths) = match event {
            TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
            TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
        };

        for (repo_id, worktree_ids) in &self.worktree_ids {
            // A repository is affected if any of its worktrees appears in the
            // event's path set.
            if worktree_ids
                .iter()
                .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
            {
                if let Some(repo) = self.repositories.get(repo_id) {
                    let repository_state = repo.read(cx).repository_state.clone();
                    // The backend may still be initializing; wait for it on a
                    // background task before setting the trust flag.
                    cx.background_spawn(async move {
                        if let Ok(RepositoryState::Local(state)) = repository_state.await {
                            state.backend.set_trusted(is_trusted);
                        }
                    })
                    .detach();
                }
            }
        }
    }
1634
    /// Keeps per-buffer git state in sync with buffer-store events.
    ///
    /// * `BufferAdded` — watches for language changes so diff syntax
    ///   highlighting can be updated.
    /// * `SharedBufferClosed` / `BufferDropped` — drops the corresponding
    ///   (shared) diff state.
    /// * `BufferChangedFilePath` — reloads the committed text in case the new
    ///   path is tracked by a repository.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Best-effort: failures are logged, not surfaced.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1707
    /// Recalculates diffs and reparses conflict markers for the given buffers.
    ///
    /// Buffers without existing git state are skipped. The returned future
    /// resolves once every triggered recalculation and conflict reparse has
    /// completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` returns `None` when no
                    // recalculation was started; `extend` handles that.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1733
    /// Writes hunk staging changes back to the git index.
    ///
    /// When a diff reports `HunksStagedOrUnstaged`, spawns a job that writes
    /// the new index text for the buffer's path. On failure, the diff's
    /// pending hunks are cleared and a [`GitStoreEvent::IndexWriteError`] is
    /// emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the operation count so stale index writes can be
                // detected by the repository job.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Outer `Ok` means the job ran; inner `Err` is the
                        // actual index-write failure we report.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1773
1774 fn local_worktree_git_repos_changed(
1775 &mut self,
1776 worktree: Entity<Worktree>,
1777 changed_repos: &UpdatedGitRepositoriesSet,
1778 cx: &mut Context<Self>,
1779 ) {
1780 log::debug!("local worktree repos changed");
1781 debug_assert!(worktree.read(cx).is_local());
1782
1783 for repository in self.repositories.values() {
1784 repository.update(cx, |repository, cx| {
1785 let repo_abs_path = &repository.work_directory_abs_path;
1786 if changed_repos.iter().any(|update| {
1787 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1788 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1789 }) {
1790 repository.reload_buffer_diff_bases(cx);
1791 }
1792 });
1793 }
1794 }
1795
    /// All repositories tracked by this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1799
1800 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1801 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1802 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1803 Some(status.status)
1804 }
1805
1806 pub fn repository_and_path_for_buffer_id(
1807 &self,
1808 buffer_id: BufferId,
1809 cx: &App,
1810 ) -> Option<(Entity<Repository>, RepoPath)> {
1811 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1812 let project_path = buffer.read(cx).project_path(cx)?;
1813 self.repository_and_path_for_project_path(&project_path, cx)
1814 }
1815
1816 pub fn repository_and_path_for_project_path(
1817 &self,
1818 path: &ProjectPath,
1819 cx: &App,
1820 ) -> Option<(Entity<Repository>, RepoPath)> {
1821 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1822 self.repositories
1823 .values()
1824 .filter_map(|repo| {
1825 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1826 Some((repo.clone(), repo_path))
1827 })
1828 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1829 }
1830
1831 pub fn git_init(
1832 &self,
1833 path: Arc<Path>,
1834 fallback_branch_name: String,
1835 cx: &App,
1836 ) -> Task<Result<()>> {
1837 match &self.state {
1838 GitStoreState::Local { fs, .. } => {
1839 let fs = fs.clone();
1840 cx.background_executor()
1841 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1842 }
1843 GitStoreState::Remote {
1844 upstream_client,
1845 upstream_project_id: project_id,
1846 ..
1847 } => {
1848 let client = upstream_client.clone();
1849 let project_id = *project_id;
1850 cx.background_executor().spawn(async move {
1851 client
1852 .request(proto::GitInit {
1853 project_id: project_id,
1854 abs_path: path.to_string_lossy().into_owned(),
1855 fallback_branch_name,
1856 })
1857 .await?;
1858 Ok(())
1859 })
1860 }
1861 }
1862 }
1863
1864 pub fn git_clone(
1865 &self,
1866 repo: String,
1867 path: impl Into<Arc<std::path::Path>>,
1868 cx: &App,
1869 ) -> Task<Result<()>> {
1870 let path = path.into();
1871 match &self.state {
1872 GitStoreState::Local { fs, .. } => {
1873 let fs = fs.clone();
1874 cx.background_executor()
1875 .spawn(async move { fs.git_clone(&repo, &path).await })
1876 }
1877 GitStoreState::Remote {
1878 upstream_client,
1879 upstream_project_id,
1880 ..
1881 } => {
1882 if upstream_client.is_via_collab() {
1883 return Task::ready(Err(anyhow!(
1884 "Git Clone isn't supported for project guests"
1885 )));
1886 }
1887 let request = upstream_client.request(proto::GitClone {
1888 project_id: *upstream_project_id,
1889 abs_path: path.to_string_lossy().into_owned(),
1890 remote_repo: repo,
1891 });
1892
1893 cx.background_spawn(async move {
1894 let result = request.await?;
1895
1896 match result.success {
1897 true => Ok(()),
1898 false => Err(anyhow!("Git Clone failed")),
1899 }
1900 })
1901 }
1902 }
1903 }
1904
    /// Handles an `UpdateRepository` message from the upstream peer.
    ///
    /// Creates a remote [`Repository`] entity on first sight of the id,
    /// applies the update to it, selects it as the active repository if none
    /// is set, and re-forwards the update downstream when this project is
    /// itself being shared.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // The subscription is created inside the `or_insert_with` closure
            // but pushed afterwards, because `this` is mutably borrowed there.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-share with our own downstream peers, rewriting the project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1960
1961 async fn handle_remove_repository(
1962 this: Entity<Self>,
1963 envelope: TypedEnvelope<proto::RemoveRepository>,
1964 mut cx: AsyncApp,
1965 ) -> Result<()> {
1966 this.update(&mut cx, |this, cx| {
1967 let mut update = envelope.payload;
1968 let id = RepositoryId::from_proto(update.id);
1969 this.repositories.remove(&id);
1970 if let Some((client, project_id)) = this.downstream_client() {
1971 update.project_id = project_id.to_proto();
1972 client.send(update).log_err();
1973 }
1974 if this.active_repo_id == Some(id) {
1975 this.active_repo_id = None;
1976 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1977 }
1978 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1979 });
1980 Ok(())
1981 }
1982
    /// Handles a `GitInit` request by initializing a repository on this host.
    async fn handle_git_init(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitInit>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let name = envelope.payload.fallback_branch_name;
        cx.update(|cx| this.read(cx).git_init(path, name, cx))
            .await?;

        Ok(proto::Ack {})
    }
1995
    /// Handles a `GitClone` request by cloning the repository on this host.
    ///
    /// The outcome is reported via the `success` flag rather than an error,
    /// so the caller always receives a response.
    async fn handle_git_clone(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitClone>,
        cx: AsyncApp,
    ) -> Result<proto::GitCloneResponse> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let repo_name = envelope.payload.remote_repo;
        let result = cx
            .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
            .await;

        Ok(proto::GitCloneResponse {
            success: result.is_ok(),
        })
    }
2011
    /// Handles a `Fetch` request by running `git fetch` on the requested
    /// repository, relaying askpass prompts back to the requesting peer.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Credential prompts are proxied to the remote peer that issued the
        // request.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2041
    /// Handles a `Push` request by running `git push` on the requested
    /// repository, relaying askpass prompts back to the requesting peer.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `options()` decodes the enum field; the `.as_ref().map` guard
        // preserves `None` when no option was supplied.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2089
2090 async fn handle_pull(
2091 this: Entity<Self>,
2092 envelope: TypedEnvelope<proto::Pull>,
2093 mut cx: AsyncApp,
2094 ) -> Result<proto::RemoteMessageResponse> {
2095 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2096 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2097 let askpass_id = envelope.payload.askpass_id;
2098 let askpass = make_remote_delegate(
2099 this,
2100 envelope.payload.project_id,
2101 repository_id,
2102 askpass_id,
2103 &mut cx,
2104 );
2105
2106 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2107 let remote_name = envelope.payload.remote_name.into();
2108 let rebase = envelope.payload.rebase;
2109
2110 let remote_message = repository_handle
2111 .update(&mut cx, |repository_handle, cx| {
2112 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2113 })
2114 .await??;
2115
2116 Ok(proto::RemoteMessageResponse {
2117 stdout: remote_message.stdout,
2118 stderr: remote_message.stderr,
2119 })
2120 }
2121
2122 async fn handle_stage(
2123 this: Entity<Self>,
2124 envelope: TypedEnvelope<proto::Stage>,
2125 mut cx: AsyncApp,
2126 ) -> Result<proto::Ack> {
2127 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2128 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2129
2130 let entries = envelope
2131 .payload
2132 .paths
2133 .into_iter()
2134 .map(|path| RepoPath::new(&path))
2135 .collect::<Result<Vec<_>>>()?;
2136
2137 repository_handle
2138 .update(&mut cx, |repository_handle, cx| {
2139 repository_handle.stage_entries(entries, cx)
2140 })
2141 .await?;
2142 Ok(proto::Ack {})
2143 }
2144
2145 async fn handle_unstage(
2146 this: Entity<Self>,
2147 envelope: TypedEnvelope<proto::Unstage>,
2148 mut cx: AsyncApp,
2149 ) -> Result<proto::Ack> {
2150 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2151 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2152
2153 let entries = envelope
2154 .payload
2155 .paths
2156 .into_iter()
2157 .map(|path| RepoPath::new(&path))
2158 .collect::<Result<Vec<_>>>()?;
2159
2160 repository_handle
2161 .update(&mut cx, |repository_handle, cx| {
2162 repository_handle.unstage_entries(entries, cx)
2163 })
2164 .await?;
2165
2166 Ok(proto::Ack {})
2167 }
2168
2169 async fn handle_stash(
2170 this: Entity<Self>,
2171 envelope: TypedEnvelope<proto::Stash>,
2172 mut cx: AsyncApp,
2173 ) -> Result<proto::Ack> {
2174 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2175 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2176
2177 let entries = envelope
2178 .payload
2179 .paths
2180 .into_iter()
2181 .map(|path| RepoPath::new(&path))
2182 .collect::<Result<Vec<_>>>()?;
2183
2184 repository_handle
2185 .update(&mut cx, |repository_handle, cx| {
2186 repository_handle.stash_entries(entries, cx)
2187 })
2188 .await?;
2189
2190 Ok(proto::Ack {})
2191 }
2192
2193 async fn handle_stash_pop(
2194 this: Entity<Self>,
2195 envelope: TypedEnvelope<proto::StashPop>,
2196 mut cx: AsyncApp,
2197 ) -> Result<proto::Ack> {
2198 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2199 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2200 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2201
2202 repository_handle
2203 .update(&mut cx, |repository_handle, cx| {
2204 repository_handle.stash_pop(stash_index, cx)
2205 })
2206 .await?;
2207
2208 Ok(proto::Ack {})
2209 }
2210
2211 async fn handle_stash_apply(
2212 this: Entity<Self>,
2213 envelope: TypedEnvelope<proto::StashApply>,
2214 mut cx: AsyncApp,
2215 ) -> Result<proto::Ack> {
2216 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2217 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2218 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2219
2220 repository_handle
2221 .update(&mut cx, |repository_handle, cx| {
2222 repository_handle.stash_apply(stash_index, cx)
2223 })
2224 .await?;
2225
2226 Ok(proto::Ack {})
2227 }
2228
2229 async fn handle_stash_drop(
2230 this: Entity<Self>,
2231 envelope: TypedEnvelope<proto::StashDrop>,
2232 mut cx: AsyncApp,
2233 ) -> Result<proto::Ack> {
2234 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2235 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2236 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2237
2238 repository_handle
2239 .update(&mut cx, |repository_handle, cx| {
2240 repository_handle.stash_drop(stash_index, cx)
2241 })
2242 .await??;
2243
2244 Ok(proto::Ack {})
2245 }
2246
2247 async fn handle_set_index_text(
2248 this: Entity<Self>,
2249 envelope: TypedEnvelope<proto::SetIndexText>,
2250 mut cx: AsyncApp,
2251 ) -> Result<proto::Ack> {
2252 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2253 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2254 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2255
2256 repository_handle
2257 .update(&mut cx, |repository_handle, cx| {
2258 repository_handle.spawn_set_index_text_job(
2259 repo_path,
2260 envelope.payload.text,
2261 None,
2262 cx,
2263 )
2264 })
2265 .await??;
2266 Ok(proto::Ack {})
2267 }
2268
2269 async fn handle_run_hook(
2270 this: Entity<Self>,
2271 envelope: TypedEnvelope<proto::RunGitHook>,
2272 mut cx: AsyncApp,
2273 ) -> Result<proto::Ack> {
2274 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2275 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2276 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2277 repository_handle
2278 .update(&mut cx, |repository_handle, cx| {
2279 repository_handle.run_hook(hook, cx)
2280 })
2281 .await??;
2282 Ok(proto::Ack {})
2283 }
2284
2285 async fn handle_commit(
2286 this: Entity<Self>,
2287 envelope: TypedEnvelope<proto::Commit>,
2288 mut cx: AsyncApp,
2289 ) -> Result<proto::Ack> {
2290 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2291 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2292 let askpass_id = envelope.payload.askpass_id;
2293
2294 let askpass = make_remote_delegate(
2295 this,
2296 envelope.payload.project_id,
2297 repository_id,
2298 askpass_id,
2299 &mut cx,
2300 );
2301
2302 let message = SharedString::from(envelope.payload.message);
2303 let name = envelope.payload.name.map(SharedString::from);
2304 let email = envelope.payload.email.map(SharedString::from);
2305 let options = envelope.payload.options.unwrap_or_default();
2306
2307 repository_handle
2308 .update(&mut cx, |repository_handle, cx| {
2309 repository_handle.commit(
2310 message,
2311 name.zip(email),
2312 CommitOptions {
2313 amend: options.amend,
2314 signoff: options.signoff,
2315 },
2316 askpass,
2317 cx,
2318 )
2319 })
2320 .await??;
2321 Ok(proto::Ack {})
2322 }
2323
2324 async fn handle_get_remotes(
2325 this: Entity<Self>,
2326 envelope: TypedEnvelope<proto::GetRemotes>,
2327 mut cx: AsyncApp,
2328 ) -> Result<proto::GetRemotesResponse> {
2329 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2330 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2331
2332 let branch_name = envelope.payload.branch_name;
2333 let is_push = envelope.payload.is_push;
2334
2335 let remotes = repository_handle
2336 .update(&mut cx, |repository_handle, _| {
2337 repository_handle.get_remotes(branch_name, is_push)
2338 })
2339 .await??;
2340
2341 Ok(proto::GetRemotesResponse {
2342 remotes: remotes
2343 .into_iter()
2344 .map(|remotes| proto::get_remotes_response::Remote {
2345 name: remotes.name.to_string(),
2346 })
2347 .collect::<Vec<_>>(),
2348 })
2349 }
2350
2351 async fn handle_get_worktrees(
2352 this: Entity<Self>,
2353 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2354 mut cx: AsyncApp,
2355 ) -> Result<proto::GitWorktreesResponse> {
2356 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2357 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2358
2359 let worktrees = repository_handle
2360 .update(&mut cx, |repository_handle, _| {
2361 repository_handle.worktrees()
2362 })
2363 .await??;
2364
2365 Ok(proto::GitWorktreesResponse {
2366 worktrees: worktrees
2367 .into_iter()
2368 .map(|worktree| worktree_to_proto(&worktree))
2369 .collect::<Vec<_>>(),
2370 })
2371 }
2372
2373 async fn handle_create_worktree(
2374 this: Entity<Self>,
2375 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2376 mut cx: AsyncApp,
2377 ) -> Result<proto::Ack> {
2378 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2379 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2380 let directory = PathBuf::from(envelope.payload.directory);
2381 let name = envelope.payload.name;
2382 let commit = envelope.payload.commit;
2383
2384 repository_handle
2385 .update(&mut cx, |repository_handle, _| {
2386 repository_handle.create_worktree(name, directory, commit)
2387 })
2388 .await??;
2389
2390 Ok(proto::Ack {})
2391 }
2392
2393 async fn handle_remove_worktree(
2394 this: Entity<Self>,
2395 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2396 mut cx: AsyncApp,
2397 ) -> Result<proto::Ack> {
2398 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2399 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2400 let path = PathBuf::from(envelope.payload.path);
2401 let force = envelope.payload.force;
2402
2403 repository_handle
2404 .update(&mut cx, |repository_handle, _| {
2405 repository_handle.remove_worktree(path, force)
2406 })
2407 .await??;
2408
2409 Ok(proto::Ack {})
2410 }
2411
2412 async fn handle_rename_worktree(
2413 this: Entity<Self>,
2414 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2415 mut cx: AsyncApp,
2416 ) -> Result<proto::Ack> {
2417 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2418 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2419 let old_path = PathBuf::from(envelope.payload.old_path);
2420 let new_path = PathBuf::from(envelope.payload.new_path);
2421
2422 repository_handle
2423 .update(&mut cx, |repository_handle, _| {
2424 repository_handle.rename_worktree(old_path, new_path)
2425 })
2426 .await??;
2427
2428 Ok(proto::Ack {})
2429 }
2430
2431 async fn handle_get_branches(
2432 this: Entity<Self>,
2433 envelope: TypedEnvelope<proto::GitGetBranches>,
2434 mut cx: AsyncApp,
2435 ) -> Result<proto::GitBranchesResponse> {
2436 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2437 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2438
2439 let branches = repository_handle
2440 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2441 .await??;
2442
2443 Ok(proto::GitBranchesResponse {
2444 branches: branches
2445 .into_iter()
2446 .map(|branch| branch_to_proto(&branch))
2447 .collect::<Vec<_>>(),
2448 })
2449 }
2450 async fn handle_get_default_branch(
2451 this: Entity<Self>,
2452 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2453 mut cx: AsyncApp,
2454 ) -> Result<proto::GetDefaultBranchResponse> {
2455 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2456 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2457
2458 let branch = repository_handle
2459 .update(&mut cx, |repository_handle, _| {
2460 repository_handle.default_branch(false)
2461 })
2462 .await??
2463 .map(Into::into);
2464
2465 Ok(proto::GetDefaultBranchResponse { branch })
2466 }
2467 async fn handle_create_branch(
2468 this: Entity<Self>,
2469 envelope: TypedEnvelope<proto::GitCreateBranch>,
2470 mut cx: AsyncApp,
2471 ) -> Result<proto::Ack> {
2472 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2473 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2474 let branch_name = envelope.payload.branch_name;
2475
2476 repository_handle
2477 .update(&mut cx, |repository_handle, _| {
2478 repository_handle.create_branch(branch_name, None)
2479 })
2480 .await??;
2481
2482 Ok(proto::Ack {})
2483 }
2484
2485 async fn handle_change_branch(
2486 this: Entity<Self>,
2487 envelope: TypedEnvelope<proto::GitChangeBranch>,
2488 mut cx: AsyncApp,
2489 ) -> Result<proto::Ack> {
2490 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2491 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2492 let branch_name = envelope.payload.branch_name;
2493
2494 repository_handle
2495 .update(&mut cx, |repository_handle, _| {
2496 repository_handle.change_branch(branch_name)
2497 })
2498 .await??;
2499
2500 Ok(proto::Ack {})
2501 }
2502
2503 async fn handle_rename_branch(
2504 this: Entity<Self>,
2505 envelope: TypedEnvelope<proto::GitRenameBranch>,
2506 mut cx: AsyncApp,
2507 ) -> Result<proto::Ack> {
2508 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2509 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2510 let branch = envelope.payload.branch;
2511 let new_name = envelope.payload.new_name;
2512
2513 repository_handle
2514 .update(&mut cx, |repository_handle, _| {
2515 repository_handle.rename_branch(branch, new_name)
2516 })
2517 .await??;
2518
2519 Ok(proto::Ack {})
2520 }
2521
2522 async fn handle_create_remote(
2523 this: Entity<Self>,
2524 envelope: TypedEnvelope<proto::GitCreateRemote>,
2525 mut cx: AsyncApp,
2526 ) -> Result<proto::Ack> {
2527 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2528 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2529 let remote_name = envelope.payload.remote_name;
2530 let remote_url = envelope.payload.remote_url;
2531
2532 repository_handle
2533 .update(&mut cx, |repository_handle, _| {
2534 repository_handle.create_remote(remote_name, remote_url)
2535 })
2536 .await??;
2537
2538 Ok(proto::Ack {})
2539 }
2540
2541 async fn handle_delete_branch(
2542 this: Entity<Self>,
2543 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2544 mut cx: AsyncApp,
2545 ) -> Result<proto::Ack> {
2546 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2547 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2548 let branch_name = envelope.payload.branch_name;
2549
2550 repository_handle
2551 .update(&mut cx, |repository_handle, _| {
2552 repository_handle.delete_branch(branch_name)
2553 })
2554 .await??;
2555
2556 Ok(proto::Ack {})
2557 }
2558
2559 async fn handle_remove_remote(
2560 this: Entity<Self>,
2561 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2562 mut cx: AsyncApp,
2563 ) -> Result<proto::Ack> {
2564 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2565 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2566 let remote_name = envelope.payload.remote_name;
2567
2568 repository_handle
2569 .update(&mut cx, |repository_handle, _| {
2570 repository_handle.remove_remote(remote_name)
2571 })
2572 .await??;
2573
2574 Ok(proto::Ack {})
2575 }
2576
2577 async fn handle_show(
2578 this: Entity<Self>,
2579 envelope: TypedEnvelope<proto::GitShow>,
2580 mut cx: AsyncApp,
2581 ) -> Result<proto::GitCommitDetails> {
2582 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2583 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2584
2585 let commit = repository_handle
2586 .update(&mut cx, |repository_handle, _| {
2587 repository_handle.show(envelope.payload.commit)
2588 })
2589 .await??;
2590 Ok(proto::GitCommitDetails {
2591 sha: commit.sha.into(),
2592 message: commit.message.into(),
2593 commit_timestamp: commit.commit_timestamp,
2594 author_email: commit.author_email.into(),
2595 author_name: commit.author_name.into(),
2596 })
2597 }
2598
2599 async fn handle_load_commit_diff(
2600 this: Entity<Self>,
2601 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2602 mut cx: AsyncApp,
2603 ) -> Result<proto::LoadCommitDiffResponse> {
2604 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2605 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2606
2607 let commit_diff = repository_handle
2608 .update(&mut cx, |repository_handle, _| {
2609 repository_handle.load_commit_diff(envelope.payload.commit)
2610 })
2611 .await??;
2612 Ok(proto::LoadCommitDiffResponse {
2613 files: commit_diff
2614 .files
2615 .into_iter()
2616 .map(|file| proto::CommitFile {
2617 path: file.path.to_proto(),
2618 old_text: file.old_text,
2619 new_text: file.new_text,
2620 is_binary: file.is_binary,
2621 })
2622 .collect(),
2623 })
2624 }
2625
2626 async fn handle_file_history(
2627 this: Entity<Self>,
2628 envelope: TypedEnvelope<proto::GitFileHistory>,
2629 mut cx: AsyncApp,
2630 ) -> Result<proto::GitFileHistoryResponse> {
2631 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2632 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2633 let path = RepoPath::from_proto(&envelope.payload.path)?;
2634 let skip = envelope.payload.skip as usize;
2635 let limit = envelope.payload.limit.map(|l| l as usize);
2636
2637 let file_history = repository_handle
2638 .update(&mut cx, |repository_handle, _| {
2639 repository_handle.file_history_paginated(path, skip, limit)
2640 })
2641 .await??;
2642
2643 Ok(proto::GitFileHistoryResponse {
2644 entries: file_history
2645 .entries
2646 .into_iter()
2647 .map(|entry| proto::FileHistoryEntry {
2648 sha: entry.sha.to_string(),
2649 subject: entry.subject.to_string(),
2650 message: entry.message.to_string(),
2651 commit_timestamp: entry.commit_timestamp,
2652 author_name: entry.author_name.to_string(),
2653 author_email: entry.author_email.to_string(),
2654 })
2655 .collect(),
2656 path: file_history.path.to_proto(),
2657 })
2658 }
2659
2660 async fn handle_reset(
2661 this: Entity<Self>,
2662 envelope: TypedEnvelope<proto::GitReset>,
2663 mut cx: AsyncApp,
2664 ) -> Result<proto::Ack> {
2665 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2666 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2667
2668 let mode = match envelope.payload.mode() {
2669 git_reset::ResetMode::Soft => ResetMode::Soft,
2670 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2671 };
2672
2673 repository_handle
2674 .update(&mut cx, |repository_handle, cx| {
2675 repository_handle.reset(envelope.payload.commit, mode, cx)
2676 })
2677 .await??;
2678 Ok(proto::Ack {})
2679 }
2680
2681 async fn handle_checkout_files(
2682 this: Entity<Self>,
2683 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2684 mut cx: AsyncApp,
2685 ) -> Result<proto::Ack> {
2686 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2687 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2688 let paths = envelope
2689 .payload
2690 .paths
2691 .iter()
2692 .map(|s| RepoPath::from_proto(s))
2693 .collect::<Result<Vec<_>>>()?;
2694
2695 repository_handle
2696 .update(&mut cx, |repository_handle, cx| {
2697 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2698 })
2699 .await?;
2700 Ok(proto::Ack {})
2701 }
2702
    /// RPC handler for `proto::OpenCommitMessageBuffer`: opens the repository's
    /// commit-message buffer, replicates it to the requesting peer, and
    /// returns the buffer's remote id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Share the buffer with the original requester, falling back to the
        // direct sender when the message wasn't forwarded.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2733
    /// RPC handler for `proto::AskPassRequest`: routes a credential prompt
    /// from a remote git operation to the askpass delegate registered under
    /// `askpass_id`, returning the (decrypted) response.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map so the mutex guard is dropped before
        // the `ask_password` await below.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Re-register the delegate so subsequent prompts for the same
        // operation can still reach it.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2762
2763 async fn handle_check_for_pushed_commits(
2764 this: Entity<Self>,
2765 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2766 mut cx: AsyncApp,
2767 ) -> Result<proto::CheckForPushedCommitsResponse> {
2768 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2769 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2770
2771 let branches = repository_handle
2772 .update(&mut cx, |repository_handle, _| {
2773 repository_handle.check_for_pushed_commits()
2774 })
2775 .await??;
2776 Ok(proto::CheckForPushedCommitsResponse {
2777 pushed_to: branches
2778 .into_iter()
2779 .map(|commit| commit.to_string())
2780 .collect(),
2781 })
2782 }
2783
    /// RPC handler for `proto::GitDiff`: produces a textual diff of the
    /// requested kind, truncated to roughly 1 MB for transport.
    async fn handle_git_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let diff_type = match envelope.payload.diff_type() {
            proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
            proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
            proto::git_diff::DiffType::MergeBase => {
                // A merge-base diff needs a ref to diff against; reject
                // requests that omit it rather than guessing one.
                let base_ref = envelope
                    .payload
                    .merge_base_ref
                    .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
                DiffType::MergeBase {
                    base_ref: base_ref.into(),
                }
            }
        };

        let mut diff = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.diff(diff_type, cx)
            })
            .await??;
        // Cap the payload size. NOTE(review): the threshold compares bytes
        // (`len`) but truncation counts chars, so multi-byte text can still
        // slightly exceed ONE_MB bytes after truncation — confirm intended.
        const ONE_MB: usize = 1_000_000;
        if diff.len() > ONE_MB {
            diff = diff.chars().take(ONE_MB).collect()
        }

        Ok(proto::GitDiffResponse { diff })
    }
2817
    /// RPC handler for `proto::GetTreeDiff`: computes the set of changed paths
    /// between two revisions — either directly (`Since`) or relative to their
    /// merge base — and encodes each entry's status plus its pre-change oid.
    async fn handle_tree_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetTreeDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId(request.payload.repository_id);
        let diff_type = if request.payload.is_merge {
            DiffTreeType::MergeBase {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        } else {
            DiffTreeType::Since {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        };

        let diff = this
            .update(&mut cx, |this, cx| {
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
            })
            .context("missing repository")?
            .await??;

        Ok(proto::GetTreeDiffResponse {
            entries: diff
                .entries
                .into_iter()
                .map(|(path, status)| proto::TreeDiffStatus {
                    path: path.as_ref().to_proto(),
                    status: match status {
                        TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
                        TreeDiffStatus::Modified { .. } => {
                            proto::tree_diff_status::Status::Modified.into()
                        }
                        TreeDiffStatus::Deleted { .. } => {
                            proto::tree_diff_status::Status::Deleted.into()
                        }
                    },
                    // Only entries that existed before the diff carry the old
                    // blob oid; added files have none.
                    oid: match status {
                        TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
                            Some(old.to_string())
                        }
                        TreeDiffStatus::Added => None,
                    },
                })
                .collect(),
        })
    }
2869
2870 async fn handle_get_blob_content(
2871 this: Entity<Self>,
2872 request: TypedEnvelope<proto::GetBlobContent>,
2873 mut cx: AsyncApp,
2874 ) -> Result<proto::GetBlobContentResponse> {
2875 let oid = git::Oid::from_str(&request.payload.oid)?;
2876 let repository_id = RepositoryId(request.payload.repository_id);
2877 let content = this
2878 .update(&mut cx, |this, cx| {
2879 let repository = this.repositories().get(&repository_id)?;
2880 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2881 })
2882 .context("missing repository")?
2883 .await?;
2884 Ok(proto::GetBlobContentResponse { content })
2885 }
2886
    /// RPC handler for `proto::OpenUnstagedDiff`: opens the unstaged diff for
    /// a buffer, retains it on behalf of the requesting peer, and returns the
    /// index ("staged") base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Record the diff per peer so it stays alive while the peer uses it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        // `None` when no base text exists for this buffer.
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2910
    /// RPC handler for `proto::OpenUncommittedDiff`: opens the uncommitted
    /// diff for a buffer, retains it on behalf of the requesting peer, and
    /// encodes the committed (HEAD) and staged (index) base texts, using
    /// `IndexMatchesHead` to avoid sending the same text twice.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Record the diff per peer so it stays alive while the peer uses it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary (unstaged) diff's base text is the index content,
            // when such a base text exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Index and HEAD share the same snapshot: send the
                        // committed text once and signal the client to reuse it.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // Committed text exists but there is no index text.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base text; send whatever index text exists.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2971
    /// RPC handler for `proto::UpdateDiffBases`: applies updated base texts to
    /// the local diff state of a buffer. Buffers with no local diff state (or
    /// that are no longer open) are silently ignored.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
2990
    /// RPC handler for `proto::BlameBuffer`: blames a buffer at a specific
    /// version, first waiting for the local replica to catch up to it.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Don't blame until the buffer has seen all edits up to `version`.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3013
3014 async fn handle_get_permalink_to_line(
3015 this: Entity<Self>,
3016 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3017 mut cx: AsyncApp,
3018 ) -> Result<proto::GetPermalinkToLineResponse> {
3019 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3020 // let version = deserialize_version(&envelope.payload.version);
3021 let selection = {
3022 let proto_selection = envelope
3023 .payload
3024 .selection
3025 .context("no selection to get permalink for defined")?;
3026 proto_selection.start as u32..proto_selection.end as u32
3027 };
3028 let buffer = this.read_with(&cx, |this, cx| {
3029 this.buffer_store.read(cx).get_existing(buffer_id)
3030 })?;
3031 let permalink = this
3032 .update(&mut cx, |this, cx| {
3033 this.get_permalink_to_line(&buffer, selection, cx)
3034 })
3035 .await?;
3036 Ok(proto::GetPermalinkToLineResponse {
3037 permalink: permalink.to_string(),
3038 })
3039 }
3040
3041 fn repository_for_request(
3042 this: &Entity<Self>,
3043 id: RepositoryId,
3044 cx: &mut AsyncApp,
3045 ) -> Result<Entity<Repository>> {
3046 this.read_with(cx, |this, _| {
3047 this.repositories
3048 .get(&id)
3049 .context("missing repository handle")
3050 .cloned()
3051 })
3052 }
3053
3054 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3055 self.repositories
3056 .iter()
3057 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3058 .collect()
3059 }
3060
    /// Groups a batch of updated worktree entries by the repository that owns
    /// each path, returning a background task that resolves to the grouping.
    ///
    /// Paths are sorted so that all entries under a given repository's working
    /// directory form a contiguous range, which each per-repo task locates via
    /// `partition_point`. When repositories are nested, a path is assigned to
    /// its innermost containing repository (see the de-duplication pass below).
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Pair each repository with its working-directory path so the work can
        // move off the main thread without touching entities.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize after sorting; the sorted order is what the binary
        // searches below rely on.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // Iterate repos in reverse sorted order so that, when collected,
            // more deeply nested repositories are processed first.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3140}
3141
3142impl BufferGitState {
3143 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3144 Self {
3145 unstaged_diff: Default::default(),
3146 uncommitted_diff: Default::default(),
3147 oid_diffs: Default::default(),
3148 recalculate_diff_task: Default::default(),
3149 language: Default::default(),
3150 language_registry: Default::default(),
3151 recalculating_tx: postage::watch::channel_with(false).0,
3152 hunk_staging_operation_count: 0,
3153 hunk_staging_operation_count_as_of_write: 0,
3154 head_text: Default::default(),
3155 index_text: Default::default(),
3156 oid_texts: Default::default(),
3157 head_changed: Default::default(),
3158 index_changed: Default::default(),
3159 language_changed: Default::default(),
3160 conflict_updated_futures: Default::default(),
3161 conflict_set: Default::default(),
3162 reparse_conflict_markers_task: Default::default(),
3163 }
3164 }
3165
3166 #[ztracing::instrument(skip_all)]
3167 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3168 self.language = buffer.read(cx).language().cloned();
3169 self.language_changed = true;
3170 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3171 }
3172
3173 fn reparse_conflict_markers(
3174 &mut self,
3175 buffer: text::BufferSnapshot,
3176 cx: &mut Context<Self>,
3177 ) -> oneshot::Receiver<()> {
3178 let (tx, rx) = oneshot::channel();
3179
3180 let Some(conflict_set) = self
3181 .conflict_set
3182 .as_ref()
3183 .and_then(|conflict_set| conflict_set.upgrade())
3184 else {
3185 return rx;
3186 };
3187
3188 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
3189 if conflict_set.has_conflict {
3190 Some(conflict_set.snapshot())
3191 } else {
3192 None
3193 }
3194 });
3195
3196 if let Some(old_snapshot) = old_snapshot {
3197 self.conflict_updated_futures.push(tx);
3198 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
3199 let (snapshot, changed_range) = cx
3200 .background_spawn(async move {
3201 let new_snapshot = ConflictSet::parse(&buffer);
3202 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
3203 (new_snapshot, changed_range)
3204 })
3205 .await;
3206 this.update(cx, |this, cx| {
3207 if let Some(conflict_set) = &this.conflict_set {
3208 conflict_set
3209 .update(cx, |conflict_set, cx| {
3210 conflict_set.set_snapshot(snapshot, changed_range, cx);
3211 })
3212 .ok();
3213 }
3214 let futures = std::mem::take(&mut this.conflict_updated_futures);
3215 for tx in futures {
3216 tx.send(()).ok();
3217 }
3218 })
3219 }))
3220 }
3221
3222 rx
3223 }
3224
3225 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3226 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3227 }
3228
3229 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3230 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3231 }
3232
3233 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3234 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3235 }
3236
3237 fn handle_base_texts_updated(
3238 &mut self,
3239 buffer: text::BufferSnapshot,
3240 message: proto::UpdateDiffBases,
3241 cx: &mut Context<Self>,
3242 ) {
3243 use proto::update_diff_bases::Mode;
3244
3245 let Some(mode) = Mode::from_i32(message.mode) else {
3246 return;
3247 };
3248
3249 let diff_bases_change = match mode {
3250 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3251 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3252 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3253 Mode::IndexAndHead => DiffBasesChange::SetEach {
3254 index: message.staged_text,
3255 head: message.committed_text,
3256 },
3257 };
3258
3259 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3260 }
3261
3262 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3263 if *self.recalculating_tx.borrow() {
3264 let mut rx = self.recalculating_tx.subscribe();
3265 Some(async move {
3266 loop {
3267 let is_recalculating = rx.recv().await;
3268 if is_recalculating != Some(true) {
3269 break;
3270 }
3271 }
3272 })
3273 } else {
3274 None
3275 }
3276 }
3277
3278 fn diff_bases_changed(
3279 &mut self,
3280 buffer: text::BufferSnapshot,
3281 diff_bases_change: Option<DiffBasesChange>,
3282 cx: &mut Context<Self>,
3283 ) {
3284 match diff_bases_change {
3285 Some(DiffBasesChange::SetIndex(index)) => {
3286 self.index_text = index.map(|mut index| {
3287 text::LineEnding::normalize(&mut index);
3288 Arc::from(index.as_str())
3289 });
3290 self.index_changed = true;
3291 }
3292 Some(DiffBasesChange::SetHead(head)) => {
3293 self.head_text = head.map(|mut head| {
3294 text::LineEnding::normalize(&mut head);
3295 Arc::from(head.as_str())
3296 });
3297 self.head_changed = true;
3298 }
3299 Some(DiffBasesChange::SetBoth(text)) => {
3300 let text = text.map(|mut text| {
3301 text::LineEnding::normalize(&mut text);
3302 Arc::from(text.as_str())
3303 });
3304 self.head_text = text.clone();
3305 self.index_text = text;
3306 self.head_changed = true;
3307 self.index_changed = true;
3308 }
3309 Some(DiffBasesChange::SetEach { index, head }) => {
3310 self.index_text = index.map(|mut index| {
3311 text::LineEnding::normalize(&mut index);
3312 Arc::from(index.as_str())
3313 });
3314 self.index_changed = true;
3315 self.head_text = head.map(|mut head| {
3316 text::LineEnding::normalize(&mut head);
3317 Arc::from(head.as_str())
3318 });
3319 self.head_changed = true;
3320 }
3321 None => {}
3322 }
3323
3324 self.recalculate_diffs(buffer, cx)
3325 }
3326
3327 #[ztracing::instrument(skip_all)]
3328 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3329 *self.recalculating_tx.borrow_mut() = true;
3330
3331 let language = self.language.clone();
3332 let language_registry = self.language_registry.clone();
3333 let unstaged_diff = self.unstaged_diff();
3334 let uncommitted_diff = self.uncommitted_diff();
3335 let head = self.head_text.clone();
3336 let index = self.index_text.clone();
3337 let index_changed = self.index_changed;
3338 let head_changed = self.head_changed;
3339 let language_changed = self.language_changed;
3340 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3341 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3342 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3343 (None, None) => true,
3344 _ => false,
3345 };
3346
3347 let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
3348 .oid_diffs
3349 .iter()
3350 .filter_map(|(oid, weak)| {
3351 let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
3352 weak.upgrade().map(|diff| (*oid, diff, base_text))
3353 })
3354 .collect();
3355
3356 self.oid_diffs.retain(|oid, weak| {
3357 let alive = weak.upgrade().is_some();
3358 if !alive {
3359 if let Some(oid) = oid {
3360 self.oid_texts.remove(oid);
3361 }
3362 }
3363 alive
3364 });
3365 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3366 log::debug!(
3367 "start recalculating diffs for buffer {}",
3368 buffer.remote_id()
3369 );
3370
3371 let mut new_unstaged_diff = None;
3372 if let Some(unstaged_diff) = &unstaged_diff {
3373 new_unstaged_diff = Some(
3374 cx.update(|cx| {
3375 unstaged_diff.read(cx).update_diff(
3376 buffer.clone(),
3377 index,
3378 index_changed.then_some(false),
3379 language.clone(),
3380 cx,
3381 )
3382 })
3383 .await,
3384 );
3385 }
3386
3387 // Dropping BufferDiff can be expensive, so yield back to the event loop
3388 // for a bit
3389 yield_now().await;
3390
3391 let mut new_uncommitted_diff = None;
3392 if let Some(uncommitted_diff) = &uncommitted_diff {
3393 new_uncommitted_diff = if index_matches_head {
3394 new_unstaged_diff.clone()
3395 } else {
3396 Some(
3397 cx.update(|cx| {
3398 uncommitted_diff.read(cx).update_diff(
3399 buffer.clone(),
3400 head,
3401 head_changed.then_some(true),
3402 language.clone(),
3403 cx,
3404 )
3405 })
3406 .await,
3407 )
3408 }
3409 }
3410
3411 // Dropping BufferDiff can be expensive, so yield back to the event loop
3412 // for a bit
3413 yield_now().await;
3414
3415 let cancel = this.update(cx, |this, _| {
3416 // This checks whether all pending stage/unstage operations
3417 // have quiesced (i.e. both the corresponding write and the
3418 // read of that write have completed). If not, then we cancel
3419 // this recalculation attempt to avoid invalidating pending
3420 // state too quickly; another recalculation will come along
3421 // later and clear the pending state once the state of the index has settled.
3422 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3423 *this.recalculating_tx.borrow_mut() = false;
3424 true
3425 } else {
3426 false
3427 }
3428 })?;
3429 if cancel {
3430 log::debug!(
3431 concat!(
3432 "aborting recalculating diffs for buffer {}",
3433 "due to subsequent hunk operations",
3434 ),
3435 buffer.remote_id()
3436 );
3437 return Ok(());
3438 }
3439
3440 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3441 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3442 {
3443 let task = unstaged_diff.update(cx, |diff, cx| {
3444 // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
3445 // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
3446 // view multibuffers.
3447 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3448 });
3449 Some(task.await)
3450 } else {
3451 None
3452 };
3453
3454 yield_now().await;
3455
3456 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3457 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3458 {
3459 uncommitted_diff
3460 .update(cx, |diff, cx| {
3461 if language_changed {
3462 diff.language_changed(language.clone(), language_registry, cx);
3463 }
3464 diff.set_snapshot_with_secondary(
3465 new_uncommitted_diff,
3466 &buffer,
3467 unstaged_changed_range.flatten(),
3468 true,
3469 cx,
3470 )
3471 })
3472 .await;
3473 }
3474
3475 yield_now().await;
3476
3477 for (oid, oid_diff, base_text) in oid_diffs {
3478 let new_oid_diff = cx
3479 .update(|cx| {
3480 oid_diff.read(cx).update_diff(
3481 buffer.clone(),
3482 base_text,
3483 None,
3484 language.clone(),
3485 cx,
3486 )
3487 })
3488 .await;
3489
3490 oid_diff
3491 .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
3492 .await;
3493
3494 log::debug!(
3495 "finished recalculating oid diff for buffer {} oid {:?}",
3496 buffer.remote_id(),
3497 oid
3498 );
3499
3500 yield_now().await;
3501 }
3502
3503 log::debug!(
3504 "finished recalculating diffs for buffer {}",
3505 buffer.remote_id()
3506 );
3507
3508 if let Some(this) = this.upgrade() {
3509 this.update(cx, |this, _| {
3510 this.index_changed = false;
3511 this.head_changed = false;
3512 this.language_changed = false;
3513 *this.recalculating_tx.borrow_mut() = false;
3514 });
3515 }
3516
3517 Ok(())
3518 }));
3519 }
3520}
3521
3522fn make_remote_delegate(
3523 this: Entity<GitStore>,
3524 project_id: u64,
3525 repository_id: RepositoryId,
3526 askpass_id: u64,
3527 cx: &mut AsyncApp,
3528) -> AskPassDelegate {
3529 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3530 this.update(cx, |this, cx| {
3531 let Some((client, _)) = this.downstream_client() else {
3532 return;
3533 };
3534 let response = client.request(proto::AskPassRequest {
3535 project_id,
3536 repository_id: repository_id.to_proto(),
3537 askpass_id,
3538 prompt,
3539 });
3540 cx.spawn(async move |_, _| {
3541 let mut response = response.await?.response;
3542 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3543 .ok();
3544 response.zeroize();
3545 anyhow::Ok(())
3546 })
3547 .detach_and_log_err(cx);
3548 });
3549 })
3550}
3551
3552impl RepositoryId {
3553 pub fn to_proto(self) -> u64 {
3554 self.0
3555 }
3556
3557 pub fn from_proto(id: u64) -> Self {
3558 RepositoryId(id)
3559 }
3560}
3561
impl RepositorySnapshot {
    /// Creates an empty snapshot for a repository that has not been scanned
    /// yet: no statuses, no branch, `scan_id` of zero.
    ///
    /// `original_repo_abs_path` falls back to the working directory when the
    /// repository is not a linked git worktree.
    fn empty(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
    ) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            original_repo_abs_path: original_repo_abs_path
                .unwrap_or_else(|| work_directory_abs_path.clone()),
            work_directory_abs_path,
            branch: None,
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
            linked_worktrees: Arc::from([]),
            path_style,
        }
    }

    /// Builds a full `UpdateRepository` message describing this snapshot from
    /// scratch, for a client that has no prior state (hence every status is
    /// in `updated_statuses` and `removed_statuses` is empty).
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// Builds an incremental `UpdateRepository` message containing only the
    /// status entries that differ from `old`.
    ///
    /// Both status lists are sorted by `repo_path`, so a single merge-style
    /// pass over the two iterators classifies each path as added, removed,
    /// or (when status or diff stat differ) modified.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Present only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Present in both: only send when something changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Present only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// The git worktrees linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }

    /// Iterates over all file status entries, in `repo_path` order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// Aggregated status summary over every tracked path.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// Looks up the status entry for a single repository-relative path.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// Looks up the diff stat (added/removed line counts) for a single
    /// repository-relative path, if one was computed.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }

    /// Converts an absolute path into a path relative to this repository's
    /// working directory, or `None` when the path lies outside it.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Joins a repository-relative path onto the working directory to obtain
    /// an absolute path.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }

    /// Shared helper for stripping a working-directory prefix from an
    /// absolute path; also used by `GitStore::process_updated_entries`,
    /// which cannot borrow a full snapshot.
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }

    /// Whether this path was conflicted when the merge heads last changed,
    /// regardless of whether the conflict has since been resolved.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }

    /// Whether this path is conflicted now, or was conflicted when the merge
    /// heads last changed (so resolved-but-unstaged conflicts still count).
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change = self
            .merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
3782
3783pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3784 proto::StashEntry {
3785 oid: entry.oid.as_bytes().to_vec(),
3786 message: entry.message.clone(),
3787 branch: entry.branch.clone(),
3788 index: entry.index as u64,
3789 timestamp: entry.timestamp,
3790 }
3791}
3792
3793pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3794 Ok(StashEntry {
3795 oid: Oid::from_bytes(&entry.oid)?,
3796 message: entry.message.clone(),
3797 index: entry.index as usize,
3798 branch: entry.branch.clone(),
3799 timestamp: entry.timestamp,
3800 })
3801}
3802
3803impl MergeDetails {
3804 async fn update(
3805 &mut self,
3806 backend: &Arc<dyn GitRepository>,
3807 current_conflicted_paths: Vec<RepoPath>,
3808 ) -> Result<bool> {
3809 log::debug!("load merge details");
3810 self.message = backend.merge_message().await.map(SharedString::from);
3811 let heads = backend
3812 .revparse_batch(vec![
3813 "MERGE_HEAD".into(),
3814 "CHERRY_PICK_HEAD".into(),
3815 "REBASE_HEAD".into(),
3816 "REVERT_HEAD".into(),
3817 "APPLY_HEAD".into(),
3818 ])
3819 .await
3820 .log_err()
3821 .unwrap_or_default()
3822 .into_iter()
3823 .map(|opt| opt.map(SharedString::from))
3824 .collect::<Vec<_>>();
3825
3826 let mut conflicts_changed = false;
3827
3828 // Record the merge state for newly conflicted paths
3829 for path in ¤t_conflicted_paths {
3830 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3831 conflicts_changed = true;
3832 self.merge_heads_by_conflicted_path
3833 .insert(path.clone(), heads.clone());
3834 }
3835 }
3836
3837 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3838 self.merge_heads_by_conflicted_path
3839 .retain(|path, old_merge_heads| {
3840 let keep = current_conflicted_paths.contains(path)
3841 || (old_merge_heads == &heads
3842 && old_merge_heads.iter().any(|head| head.is_some()));
3843 if !keep {
3844 conflicts_changed = true;
3845 }
3846 keep
3847 });
3848
3849 Ok(conflicts_changed)
3850 }
3851}
3852
3853impl Repository {
3854 pub fn is_trusted(&self) -> bool {
3855 match self.repository_state.peek() {
3856 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3857 _ => false,
3858 }
3859 }
3860
    /// Returns an owned copy of this repository's current snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
3864
    /// Iterates over all pending stage/unstage operations, in path order.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
3868
    /// Aggregated summary over all pending operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
3872
3873 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3874 self.pending_ops
3875 .get(&PathKey(path.as_ref().clone()), ())
3876 .cloned()
3877 }
3878
    /// Constructs a repository handle backed by a local git repository on
    /// disk.
    ///
    /// The backend is opened asynchronously: the `Shared` future for the
    /// [`LocalRepositoryState`] is handed both to the git worker (which
    /// processes queued jobs once the backend is ready) and to
    /// `repository_state`, where it is wrapped in [`RepositoryState::Local`].
    /// Errors are stringified so the shared future stays cloneable.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data whenever the branch changes
        // after the initial scan (scan_id > 1).
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3946
    /// Constructs a repository handle that proxies all git operations to a
    /// remote host over the given RPC client.
    ///
    /// Unlike [`Repository::local`], the repository state is available
    /// immediately (`Task::ready`), since there is no on-disk backend to open.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3984
    /// Upgrades the weak back-pointer to the owning [`GitStore`], if it is
    /// still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
3988
    /// Reloads the index and HEAD base texts for every open buffer that
    /// belongs to this repository, and pushes any changes into the buffers'
    /// diff state (and to the downstream client, when one is connected).
    ///
    /// Runs as a keyed job (`GitJobKey::ReloadBufferDiffBases`) on the git
    /// worker so multiple reload requests coalesce. Only local repositories
    /// are supported; for remote repositories the job logs an error and
    /// returns.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Collect, for each open buffer in this repository, its repo
                // path and (when the respective diff is still alive) the base
                // texts currently in use.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // On the background executor, load the fresh base texts from
                // git and compute the minimal DiffBasesChange for each buffer
                // (None means nothing changed).
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Back on the main thread: forward the changes downstream and
                // apply them to each buffer's diff state.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4148
    /// Enqueues an unkeyed job on this repository's git worker.
    ///
    /// When `status` is provided it is shown as an active-job message while
    /// the job runs. Returns a receiver for the job's result; it yields
    /// `Canceled` if the worker shuts down before the job completes.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4161
    /// Enqueues `job` on the repository's serial git job queue, optionally
    /// keyed by `key` (the queue consumer may use the key to coalesce or
    /// dedupe jobs — behavior of the consumer is not visible here).
    ///
    /// While the job runs, a `JobInfo` entry (start time + `status` message)
    /// is kept in `active_jobs` so the UI can surface in-flight operations.
    /// The result is delivered through the returned oneshot receiver; if the
    /// receiver is dropped, the send is silently ignored.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Monotonic id used only as the `active_jobs` map key.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    // Construct the job future on the queue's thread, then
                    // drive it in a spawned task so bookkeeping happens
                    // around the await.
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        if let Some(s) = status.clone() {
                            // Register the in-flight job for status display;
                            // `.ok()` because the repository may be gone.
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Always clear the status entry, even on failure.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // Receiver may have been dropped; that's fine.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4211
4212 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4213 let Some(git_store) = self.git_store.upgrade() else {
4214 return;
4215 };
4216 let entity = cx.entity();
4217 git_store.update(cx, |git_store, cx| {
4218 let Some((&id, _)) = git_store
4219 .repositories
4220 .iter()
4221 .find(|(_, handle)| *handle == &entity)
4222 else {
4223 return;
4224 };
4225 git_store.active_repo_id = Some(id);
4226 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4227 });
4228 }
4229
    /// Iterates over the status entries held in the cached repository snapshot.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4233
    /// Returns the cached diff statistics for `path`, if the snapshot has any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4237
    /// Returns a clone of the stash entries held in the cached snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4241
4242 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4243 let git_store = self.git_store.upgrade()?;
4244 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4245 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4246 let abs_path = SanitizedPath::new(&abs_path);
4247 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4248 Some(ProjectPath {
4249 worktree_id: worktree.read(cx).id(),
4250 path: relative_path,
4251 })
4252 }
4253
4254 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4255 let git_store = self.git_store.upgrade()?;
4256 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4257 let abs_path = worktree_store.absolutize(path, cx)?;
4258 self.snapshot.abs_path_to_repo_path(&abs_path)
4259 }
4260
4261 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4262 other
4263 .read(cx)
4264 .snapshot
4265 .work_directory_abs_path
4266 .starts_with(&self.snapshot.work_directory_abs_path)
4267 }
4268
4269 pub fn open_commit_buffer(
4270 &mut self,
4271 languages: Option<Arc<LanguageRegistry>>,
4272 buffer_store: Entity<BufferStore>,
4273 cx: &mut Context<Self>,
4274 ) -> Task<Result<Entity<Buffer>>> {
4275 let id = self.id;
4276 if let Some(buffer) = self.commit_message_buffer.clone() {
4277 return Task::ready(Ok(buffer));
4278 }
4279 let this = cx.weak_entity();
4280
4281 let rx = self.send_job(None, move |state, mut cx| async move {
4282 let Some(this) = this.upgrade() else {
4283 bail!("git store was dropped");
4284 };
4285 match state {
4286 RepositoryState::Local(..) => {
4287 this.update(&mut cx, |_, cx| {
4288 Self::open_local_commit_buffer(languages, buffer_store, cx)
4289 })
4290 .await
4291 }
4292 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4293 let request = client.request(proto::OpenCommitMessageBuffer {
4294 project_id: project_id.0,
4295 repository_id: id.to_proto(),
4296 });
4297 let response = request.await.context("requesting to open commit buffer")?;
4298 let buffer_id = BufferId::new(response.buffer_id)?;
4299 let buffer = buffer_store
4300 .update(&mut cx, |buffer_store, cx| {
4301 buffer_store.wait_for_remote_buffer(buffer_id, cx)
4302 })
4303 .await?;
4304 if let Some(language_registry) = languages {
4305 let git_commit_language =
4306 language_registry.language_for_name("Git Commit").await?;
4307 buffer.update(&mut cx, |buffer, cx| {
4308 buffer.set_language(Some(git_commit_language), cx);
4309 });
4310 }
4311 this.update(&mut cx, |this, _| {
4312 this.commit_message_buffer = Some(buffer.clone());
4313 });
4314 Ok(buffer)
4315 }
4316 }
4317 });
4318
4319 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
4320 }
4321
    /// Creates a fresh local buffer for composing a commit message, tagged
    /// with the "Git Commit" language when a registry is provided, and caches
    /// it on the repository as `commit_message_buffer`.
    fn open_local_commit_buffer(
        language_registry: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        cx.spawn(async move |repository, cx| {
            let git_commit_language = match language_registry {
                Some(language_registry) => {
                    Some(language_registry.language_for_name("Git Commit").await?)
                }
                None => None,
            };
            let buffer = buffer_store
                .update(cx, |buffer_store, cx| {
                    buffer_store.create_buffer(git_commit_language, false, cx)
                })
                .await?;

            // Cache so subsequent open_commit_buffer calls reuse it.
            repository.update(cx, |repository, _| {
                repository.commit_message_buffer = Some(buffer.clone());
            })?;
            Ok(buffer)
        })
    }
4346
    /// Restores `paths` to their state at `commit` (i.e. `git checkout
    /// <commit> -- <paths>`), tracking the affected paths as pending
    /// "Reverted" operations while the job runs.
    ///
    /// Locally the backend performs the checkout; remotely the request is
    /// forwarded over RPC.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4401
    /// Resets the repository to `commit` with the given `reset_mode`
    /// (`git reset --soft|--mixed <commit>`).
    ///
    /// Locally the backend executes the reset; remotely the mode is mapped to
    /// its proto representation and the request is forwarded over RPC.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4435
    /// Fetches the details (sha, message, timestamp, author) of `commit`,
    /// analogous to `git show`. Remote responses are converted from their
    /// proto representation into `CommitDetails`.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4463
    /// Loads the full diff introduced by `commit` (per-file old/new text).
    ///
    /// Remote responses are converted file-by-file from proto; a malformed
    /// path in any file fails the whole conversion.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4499
    /// Loads the complete commit history for `path`. Convenience wrapper
    /// around [`Self::file_history_paginated`] with no skip and no limit.
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4506
    /// Loads a page of the commit history for `path`, skipping the first
    /// `skip` entries and returning at most `limit` entries (all remaining
    /// entries when `limit` is `None`).
    ///
    /// Remote responses are converted entry-by-entry from proto into
    /// `FileHistoryEntry` values.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4548
    /// Returns the already-fetched graph data for the given source/order pair,
    /// if any. Does not trigger loading — see [`Self::graph_data`] for that.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4556
    /// Returns the slice of commit-graph data covering `range` for the given
    /// source/order, starting a background fetch on first access.
    ///
    /// The fetch task streams commits into `initial_graph_data` (see
    /// [`Self::local_git_graph_data`]); on failure its error is recorded on
    /// the entry. `range` is clamped to the data currently available, so the
    /// returned slice may be shorter than requested while `is_loading` is
    /// still true. Remote repositories are not supported and report an error.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record the failure on the map entry so the UI can show it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to what has streamed in so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4626
    /// Streams initial commit-graph data from the local backend into the
    /// repository's `initial_graph_data` entry for `(log_source, log_order)`.
    ///
    /// The backend writes batches of commits into a channel from a background
    /// task; this function drains the channel on the foreground, appending
    /// each commit, maintaining the sha→index map, and emitting a
    /// `GitGraphEvent::CountUpdated` per commit so views can refresh.
    /// Errors are surfaced as `SharedString` for display.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // The backend produces commit batches on a background task; dropping
        // `request_tx` there ends the loop below.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                // The entry is created before this task is spawned; a vacant
                // entry here indicates the task outlived its owner.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4681
    /// Returns the cached [`CommitDataState`] for `sha`, requesting a load if
    /// it isn't cached yet.
    ///
    /// When the background handler is `Open`, the sha is queued and marked
    /// `Loading`; when `Closed`, the handler is (re)started first.
    /// NOTE(review): on the `Closed` path the request for `sha` is not queued
    /// in this call — a later call, once the handler is open, sends it.
    /// Callers appear expected to re-poll (e.g. on re-render); confirm.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                // A handler is already spinning up; nothing to do.
                GraphCommitHandlerState::Starting => {}
            }
        }

        // Report `Loading` for shas whose request couldn't be queued yet.
        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4702
    /// Spins up the pair of tasks that serve on-demand commit-data lookups
    /// for the git graph.
    ///
    /// A background task owns the backend's commit-data reader and serves
    /// requests from `request_rx`, shutting itself down after 10 seconds of
    /// inactivity or when either channel closes. A foreground task drains the
    /// results into `self.commit_data` and flips the handler back to `Closed`
    /// when the stream ends, so a later [`Self::fetch_commit_data`] call can
    /// restart it. Only local repositories are supported.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Bounded results channel applies backpressure to the reader.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Results stream ended (timeout/close): allow a future restart.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: tear the handler down after 10s without
                // requests; it will be reopened lazily on the next fetch.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the results channel ends the foreground drain loop.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4793
    /// Returns the git store's buffer store, or `None` if the store was dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4797
4798 fn save_buffers<'a>(
4799 &self,
4800 entries: impl IntoIterator<Item = &'a RepoPath>,
4801 cx: &mut Context<Self>,
4802 ) -> Vec<Task<anyhow::Result<()>>> {
4803 let mut save_futures = Vec::new();
4804 if let Some(buffer_store) = self.buffer_store(cx) {
4805 buffer_store.update(cx, |buffer_store, cx| {
4806 for path in entries {
4807 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4808 continue;
4809 };
4810 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4811 && buffer
4812 .read(cx)
4813 .file()
4814 .is_some_and(|file| file.disk_state().exists())
4815 && buffer.read(cx).has_unsaved_edits()
4816 {
4817 save_futures.push(buffer_store.save_buffer(buffer, cx));
4818 }
4819 }
4820 })
4821 }
4822 save_futures
4823 }
4824
    /// Stages `entries` (`git add`). See [`Self::stage_or_unstage_entries`].
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4832
    /// Unstages `entries` (`git reset`). See [`Self::stage_or_unstage_entries`].
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4840
    /// Stages (`stage == true`) or unstages `entries`, saving any dirty open
    /// buffers first so the index write sees current file contents.
    ///
    /// Before touching the index, all hunks of each affected uncommitted diff
    /// are optimistically marked staged/unstaged and the per-diff
    /// `hunk_staging_operation_count` is bumped; after the write, the counts
    /// are committed on success or the pending hunks are rolled back on
    /// failure. The job is keyed on `WriteIndex(entries)` so concurrent
    /// writes for the same paths can be coalesced, and the paths are tracked
    /// as pending ops for the duration.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush dirty buffers before writing the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip all hunks in each affected
                            // diff, recording the per-diff operation count so
                            // we can reconcile after the index write.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write, locally or via RPC.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic hunk state: commit the
                            // operation counts on success, or clear the
                            // pending hunks if the write failed.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5025
5026 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5027 let to_stage = self
5028 .cached_status()
5029 .filter_map(|entry| {
5030 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
5031 if ops.staging() || ops.staged() {
5032 None
5033 } else {
5034 Some(entry.repo_path)
5035 }
5036 } else if entry.status.staging().is_fully_staged() {
5037 None
5038 } else {
5039 Some(entry.repo_path)
5040 }
5041 })
5042 .collect();
5043 self.stage_or_unstage_entries(true, to_stage, cx)
5044 }
5045
5046 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5047 let to_unstage = self
5048 .cached_status()
5049 .filter_map(|entry| {
5050 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
5051 if !ops.staging() && !ops.staged() {
5052 None
5053 } else {
5054 Some(entry.repo_path)
5055 }
5056 } else if entry.status.staging().is_fully_unstaged() {
5057 None
5058 } else {
5059 Some(entry.repo_path)
5060 }
5061 })
5062 .collect();
5063 self.stage_or_unstage_entries(false, to_unstage, cx)
5064 }
5065
    /// Stashes every path in the cached status. See [`Self::stash_entries`].
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5071
    /// Stashes the given paths (`git stash push -- <paths>`), locally via the
    /// backend or remotely over RPC.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5108
    /// Pops the stash entry at `index` (the latest entry when `None`),
    /// locally via the backend or remotely over RPC.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5142
    /// Applies the stash entry at `index` (the latest entry when `None`)
    /// without dropping it, locally via the backend or remotely over RPC.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5176
5177 pub fn stash_drop(
5178 &mut self,
5179 index: Option<usize>,
5180 cx: &mut Context<Self>,
5181 ) -> oneshot::Receiver<anyhow::Result<()>> {
5182 let id = self.id;
5183 let updates_tx = self
5184 .git_store()
5185 .and_then(|git_store| match &git_store.read(cx).state {
5186 GitStoreState::Local { downstream, .. } => downstream
5187 .as_ref()
5188 .map(|downstream| downstream.updates_tx.clone()),
5189 _ => None,
5190 });
5191 let this = cx.weak_entity();
5192 self.send_job(None, move |git_repo, mut cx| async move {
5193 match git_repo {
5194 RepositoryState::Local(LocalRepositoryState {
5195 backend,
5196 environment,
5197 ..
5198 }) => {
5199 // TODO would be nice to not have to do this manually
5200 let result = backend.stash_drop(index, environment).await;
5201 if result.is_ok()
5202 && let Ok(stash_entries) = backend.stash_entries().await
5203 {
5204 let snapshot = this.update(&mut cx, |this, cx| {
5205 this.snapshot.stash_entries = stash_entries;
5206 cx.emit(RepositoryEvent::StashEntriesChanged);
5207 this.snapshot.clone()
5208 })?;
5209 if let Some(updates_tx) = updates_tx {
5210 updates_tx
5211 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5212 .ok();
5213 }
5214 }
5215
5216 result
5217 }
5218 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5219 client
5220 .request(proto::StashDrop {
5221 project_id: project_id.0,
5222 repository_id: id.to_proto(),
5223 stash_index: index.map(|i| i as u64),
5224 })
5225 .await
5226 .context("sending stash pop request")?;
5227 Ok(())
5228 }
5229 }
5230 })
5231 }
5232
    /// Runs the given git hook, locally via the backend or remotely over RPC,
    /// with a "git hook <name>" status line while it runs.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5259
    /// Creates a commit with `message`, optionally overriding author
    /// name/email, honoring `options` (amend/signoff).
    ///
    /// The pre-commit hook is enqueued first and its result awaited at the
    /// start of the commit job, so a failing hook aborts the commit. For
    /// remote repositories an askpass delegate is registered under a fresh id
    /// for the duration of the RPC so credential prompts can round-trip.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook ahead of the commit job on the same
        // serial queue; its result gates the commit below.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always deregister the delegate when the request ends.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5314
    /// Runs `git fetch` with the given options, returning the remote command
    /// output. For remote repositories an askpass delegate is registered
    /// under a fresh id for the duration of the RPC so credential prompts can
    /// round-trip.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always deregister the delegate when the request ends.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5356
    /// Pushes `branch` to `remote_branch` on `remote`, streaming credential
    /// prompts through `askpass`.
    ///
    /// After a successful local push, the head branch is re-read so that its
    /// upstream-tracking state is refreshed immediately, and the updated
    /// snapshot is forwarded to any downstream collaborators.
    pub fn push(
        &mut self,
        branch: SharedString,
        remote_branch: SharedString,
        remote: SharedString,
        options: Option<PushOptions>,
        askpass: AskPassDelegate,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Extra flag rendered into the human-readable status label below.
        let args = options
            .map(|option| match option {
                PushOptions::SetUpstream => " --set-upstream",
                PushOptions::Force => " --force-with-lease",
            })
            .unwrap_or("");

        // Channel for propagating the refreshed snapshot to downstream
        // (collaborating) clients, present only when this project is shared.
        let updates_tx = self
            .git_store()
            .and_then(|git_store| match &git_store.read(cx).state {
                GitStoreState::Local { downstream, .. } => downstream
                    .as_ref()
                    .map(|downstream| downstream.updates_tx.clone()),
                _ => None,
            });

        let this = cx.weak_entity();
        self.send_job(
            Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
            move |git_repo, mut cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => {
                        let result = backend
                            .push(
                                branch.to_string(),
                                remote_branch.to_string(),
                                remote.to_string(),
                                options,
                                askpass,
                                environment.clone(),
                                cx.clone(),
                            )
                            .await;
                        // TODO would be nice to not have to do this manually
                        if result.is_ok() {
                            // Re-scan branches so the in-memory head branch
                            // reflects the post-push upstream state.
                            let branches = backend.branches().await?;
                            let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after scan is {branch:?}");
                            let snapshot = this.update(&mut cx, |this, cx| {
                                this.snapshot.branch = branch;
                                cx.emit(RepositoryEvent::BranchChanged);
                                this.snapshot.clone()
                            })?;
                            if let Some(updates_tx) = updates_tx {
                                updates_tx
                                    .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                                    .ok();
                            }
                        }
                        result
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Register the askpass delegate so the host can relay
                        // credential prompts back here; always unregister it.
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Push {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_branch_name: remote_branch.to_string(),
                                remote_name: remote.to_string(),
                                options: options.map(|options| match options {
                                    PushOptions::Force => proto::push::PushOptions::Force,
                                    PushOptions::SetUpstream => {
                                        proto::push::PushOptions::SetUpstream
                                    }
                                }
                                as i32),
                            })
                            .await?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }
5458
5459 pub fn pull(
5460 &mut self,
5461 branch: Option<SharedString>,
5462 remote: SharedString,
5463 rebase: bool,
5464 askpass: AskPassDelegate,
5465 _cx: &mut App,
5466 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5467 let askpass_delegates = self.askpass_delegates.clone();
5468 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5469 let id = self.id;
5470
5471 let mut status = "git pull".to_string();
5472 if rebase {
5473 status.push_str(" --rebase");
5474 }
5475 status.push_str(&format!(" {}", remote));
5476 if let Some(b) = &branch {
5477 status.push_str(&format!(" {}", b));
5478 }
5479
5480 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5481 match git_repo {
5482 RepositoryState::Local(LocalRepositoryState {
5483 backend,
5484 environment,
5485 ..
5486 }) => {
5487 backend
5488 .pull(
5489 branch.as_ref().map(|b| b.to_string()),
5490 remote.to_string(),
5491 rebase,
5492 askpass,
5493 environment.clone(),
5494 cx,
5495 )
5496 .await
5497 }
5498 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5499 askpass_delegates.lock().insert(askpass_id, askpass);
5500 let _defer = util::defer(|| {
5501 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5502 debug_assert!(askpass_delegate.is_some());
5503 });
5504 let response = client
5505 .request(proto::Pull {
5506 project_id: project_id.0,
5507 repository_id: id.to_proto(),
5508 askpass_id,
5509 rebase,
5510 branch_name: branch.as_ref().map(|b| b.to_string()),
5511 remote_name: remote.to_string(),
5512 })
5513 .await?;
5514
5515 Ok(RemoteCommandOutput {
5516 stdout: response.stdout,
5517 stderr: response.stderr,
5518 })
5519 }
5520 }
5521 })
5522 }
5523
    /// Writes `content` into the git index entry for `path` (`None` removes
    /// the entry), via a job keyed on the path so that successive writes to
    /// the same index entry are coalesced by the job queue.
    ///
    /// `hunk_staging_operation_count` — when supplied, records which hunk
    /// staging operation this index write corresponds to on the buffer's diff
    /// state, so stale index updates can be distinguished from newer ones.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Mirror the on-disk file's executable bit into the
                        // index entry; missing or unreadable files are
                        // treated as non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                // Record which staging-operation generation this write
                // belongs to on the corresponding open buffer's diff state
                // (no-op if the buffer or diff state no longer exists).
                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5602
5603 pub fn create_remote(
5604 &mut self,
5605 remote_name: String,
5606 remote_url: String,
5607 ) -> oneshot::Receiver<Result<()>> {
5608 let id = self.id;
5609 self.send_job(
5610 Some(format!("git remote add {remote_name} {remote_url}").into()),
5611 move |repo, _cx| async move {
5612 match repo {
5613 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5614 backend.create_remote(remote_name, remote_url).await
5615 }
5616 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5617 client
5618 .request(proto::GitCreateRemote {
5619 project_id: project_id.0,
5620 repository_id: id.to_proto(),
5621 remote_name,
5622 remote_url,
5623 })
5624 .await?;
5625
5626 Ok(())
5627 }
5628 }
5629 },
5630 )
5631 }
5632
5633 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5634 let id = self.id;
5635 self.send_job(
5636 Some(format!("git remove remote {remote_name}").into()),
5637 move |repo, _cx| async move {
5638 match repo {
5639 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5640 backend.remove_remote(remote_name).await
5641 }
5642 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5643 client
5644 .request(proto::GitRemoveRemote {
5645 project_id: project_id.0,
5646 repository_id: id.to_proto(),
5647 remote_name,
5648 })
5649 .await?;
5650
5651 Ok(())
5652 }
5653 }
5654 },
5655 )
5656 }
5657
5658 pub fn get_remotes(
5659 &mut self,
5660 branch_name: Option<String>,
5661 is_push: bool,
5662 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5663 let id = self.id;
5664 self.send_job(None, move |repo, _cx| async move {
5665 match repo {
5666 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5667 let remote = if let Some(branch_name) = branch_name {
5668 if is_push {
5669 backend.get_push_remote(branch_name).await?
5670 } else {
5671 backend.get_branch_remote(branch_name).await?
5672 }
5673 } else {
5674 None
5675 };
5676
5677 match remote {
5678 Some(remote) => Ok(vec![remote]),
5679 None => backend.get_all_remotes().await,
5680 }
5681 }
5682 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5683 let response = client
5684 .request(proto::GetRemotes {
5685 project_id: project_id.0,
5686 repository_id: id.to_proto(),
5687 branch_name,
5688 is_push,
5689 })
5690 .await?;
5691
5692 let remotes = response
5693 .remotes
5694 .into_iter()
5695 .map(|remotes| Remote {
5696 name: remotes.name.into(),
5697 })
5698 .collect();
5699
5700 Ok(remotes)
5701 }
5702 }
5703 })
5704 }
5705
5706 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5707 let id = self.id;
5708 self.send_job(None, move |repo, _| async move {
5709 match repo {
5710 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5711 backend.branches().await
5712 }
5713 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5714 let response = client
5715 .request(proto::GitGetBranches {
5716 project_id: project_id.0,
5717 repository_id: id.to_proto(),
5718 })
5719 .await?;
5720
5721 let branches = response
5722 .branches
5723 .into_iter()
5724 .map(|branch| proto_to_branch(&branch))
5725 .collect();
5726
5727 Ok(branches)
5728 }
5729 }
5730 })
5731 }
5732
5733 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5734 let id = self.id;
5735 self.send_job(None, move |repo, _| async move {
5736 match repo {
5737 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5738 backend.worktrees().await
5739 }
5740 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5741 let response = client
5742 .request(proto::GitGetWorktrees {
5743 project_id: project_id.0,
5744 repository_id: id.to_proto(),
5745 })
5746 .await?;
5747
5748 let worktrees = response
5749 .worktrees
5750 .into_iter()
5751 .map(|worktree| proto_to_worktree(&worktree))
5752 .collect();
5753
5754 Ok(worktrees)
5755 }
5756 }
5757 })
5758 }
5759
5760 pub fn create_worktree(
5761 &mut self,
5762 name: String,
5763 directory: PathBuf,
5764 commit: Option<String>,
5765 ) -> oneshot::Receiver<Result<()>> {
5766 let id = self.id;
5767 self.send_job(
5768 Some("git worktree add".into()),
5769 move |repo, _cx| async move {
5770 match repo {
5771 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5772 backend.create_worktree(name, directory, commit).await
5773 }
5774 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5775 client
5776 .request(proto::GitCreateWorktree {
5777 project_id: project_id.0,
5778 repository_id: id.to_proto(),
5779 name,
5780 directory: directory.to_string_lossy().to_string(),
5781 commit,
5782 })
5783 .await?;
5784
5785 Ok(())
5786 }
5787 }
5788 },
5789 )
5790 }
5791
5792 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5793 let id = self.id;
5794 self.send_job(
5795 Some(format!("git worktree remove: {}", path.display()).into()),
5796 move |repo, _cx| async move {
5797 match repo {
5798 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5799 backend.remove_worktree(path, force).await
5800 }
5801 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5802 client
5803 .request(proto::GitRemoveWorktree {
5804 project_id: project_id.0,
5805 repository_id: id.to_proto(),
5806 path: path.to_string_lossy().to_string(),
5807 force,
5808 })
5809 .await?;
5810
5811 Ok(())
5812 }
5813 }
5814 },
5815 )
5816 }
5817
5818 pub fn rename_worktree(
5819 &mut self,
5820 old_path: PathBuf,
5821 new_path: PathBuf,
5822 ) -> oneshot::Receiver<Result<()>> {
5823 let id = self.id;
5824 self.send_job(
5825 Some(format!("git worktree move: {}", old_path.display()).into()),
5826 move |repo, _cx| async move {
5827 match repo {
5828 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5829 backend.rename_worktree(old_path, new_path).await
5830 }
5831 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5832 client
5833 .request(proto::GitRenameWorktree {
5834 project_id: project_id.0,
5835 repository_id: id.to_proto(),
5836 old_path: old_path.to_string_lossy().to_string(),
5837 new_path: new_path.to_string_lossy().to_string(),
5838 })
5839 .await?;
5840
5841 Ok(())
5842 }
5843 }
5844 },
5845 )
5846 }
5847
5848 pub fn default_branch(
5849 &mut self,
5850 include_remote_name: bool,
5851 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5852 let id = self.id;
5853 self.send_job(None, move |repo, _| async move {
5854 match repo {
5855 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5856 backend.default_branch(include_remote_name).await
5857 }
5858 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5859 let response = client
5860 .request(proto::GetDefaultBranch {
5861 project_id: project_id.0,
5862 repository_id: id.to_proto(),
5863 })
5864 .await?;
5865
5866 anyhow::Ok(response.branch.map(SharedString::from))
5867 }
5868 }
5869 })
5870 }
5871
    /// Computes the set of paths — with added/modified/deleted status — that
    /// differ between the two trees described by `diff_type`.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Decode the proto entries. Entries with a missing or
                    // malformed oid/path are logged (via `log_err`) and
                    // skipped rather than failing the entire diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                // Modified/deleted entries carry the previous
                                // blob's oid so its contents can be loaded.
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
5931
5932 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5933 let id = self.id;
5934 self.send_job(None, move |repo, _cx| async move {
5935 match repo {
5936 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5937 backend.diff(diff_type).await
5938 }
5939 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5940 let (proto_diff_type, merge_base_ref) = match &diff_type {
5941 DiffType::HeadToIndex => {
5942 (proto::git_diff::DiffType::HeadToIndex.into(), None)
5943 }
5944 DiffType::HeadToWorktree => {
5945 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
5946 }
5947 DiffType::MergeBase { base_ref } => (
5948 proto::git_diff::DiffType::MergeBase.into(),
5949 Some(base_ref.to_string()),
5950 ),
5951 };
5952 let response = client
5953 .request(proto::GitDiff {
5954 project_id: project_id.0,
5955 repository_id: id.to_proto(),
5956 diff_type: proto_diff_type,
5957 merge_base_ref,
5958 })
5959 .await?;
5960
5961 Ok(response.diff)
5962 }
5963 }
5964 })
5965 }
5966
    /// Creates `branch_name` and switches to it (`git switch -c`), optionally
    /// starting from `base_branch`.
    ///
    /// NOTE(review): in the remote (RPC) path `base_branch` is not forwarded —
    /// `proto::GitCreateBranch` carries only the new branch name — so remote
    /// branches are presumably always created from the current HEAD; confirm
    /// against the host-side handler.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5997
5998 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5999 let id = self.id;
6000 self.send_job(
6001 Some(format!("git switch {branch_name}").into()),
6002 move |repo, _cx| async move {
6003 match repo {
6004 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6005 backend.change_branch(branch_name).await
6006 }
6007 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6008 client
6009 .request(proto::GitChangeBranch {
6010 project_id: project_id.0,
6011 repository_id: id.to_proto(),
6012 branch_name,
6013 })
6014 .await?;
6015
6016 Ok(())
6017 }
6018 }
6019 },
6020 )
6021 }
6022
6023 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6024 let id = self.id;
6025 self.send_job(
6026 Some(format!("git branch -d {branch_name}").into()),
6027 move |repo, _cx| async move {
6028 match repo {
6029 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
6030 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6031 client
6032 .request(proto::GitDeleteBranch {
6033 project_id: project_id.0,
6034 repository_id: id.to_proto(),
6035 branch_name,
6036 })
6037 .await?;
6038
6039 Ok(())
6040 }
6041 }
6042 },
6043 )
6044 }
6045
6046 pub fn rename_branch(
6047 &mut self,
6048 branch: String,
6049 new_name: String,
6050 ) -> oneshot::Receiver<Result<()>> {
6051 let id = self.id;
6052 self.send_job(
6053 Some(format!("git branch -m {branch} {new_name}").into()),
6054 move |repo, _cx| async move {
6055 match repo {
6056 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6057 backend.rename_branch(branch, new_name).await
6058 }
6059 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6060 client
6061 .request(proto::GitRenameBranch {
6062 project_id: project_id.0,
6063 repository_id: id.to_proto(),
6064 branch,
6065 new_name,
6066 })
6067 .await?;
6068
6069 Ok(())
6070 }
6071 }
6072 },
6073 )
6074 }
6075
6076 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6077 let id = self.id;
6078 self.send_job(None, move |repo, _cx| async move {
6079 match repo {
6080 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6081 backend.check_for_pushed_commit().await
6082 }
6083 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6084 let response = client
6085 .request(proto::CheckForPushedCommits {
6086 project_id: project_id.0,
6087 repository_id: id.to_proto(),
6088 })
6089 .await?;
6090
6091 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6092
6093 Ok(branches)
6094 }
6095 }
6096 })
6097 }
6098
6099 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6100 self.send_job(None, |repo, _cx| async move {
6101 match repo {
6102 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6103 backend.checkpoint().await
6104 }
6105 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6106 }
6107 })
6108 }
6109
6110 pub fn restore_checkpoint(
6111 &mut self,
6112 checkpoint: GitRepositoryCheckpoint,
6113 ) -> oneshot::Receiver<Result<()>> {
6114 self.send_job(None, move |repo, _cx| async move {
6115 match repo {
6116 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6117 backend.restore_checkpoint(checkpoint).await
6118 }
6119 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6120 }
6121 })
6122 }
6123
    /// Applies a `proto::UpdateRepository` message received from the upstream
    /// (host) side of a collaboration session to this replica's snapshot,
    /// emitting the appropriate `RepositoryEvent`s for anything that changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch / head commit: compare before overwriting so listeners get a
        // notification whenever either differs from the current snapshot.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to decode are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // File statuses arrive as a delta — removals plus upserts — applied
        // as one batch edit to the status tree; undecodable paths are logged
        // and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only the final message of a batched update advances the scan id.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6207
6208 pub fn compare_checkpoints(
6209 &mut self,
6210 left: GitRepositoryCheckpoint,
6211 right: GitRepositoryCheckpoint,
6212 ) -> oneshot::Receiver<Result<bool>> {
6213 self.send_job(None, move |repo, _cx| async move {
6214 match repo {
6215 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6216 backend.compare_checkpoints(left, right).await
6217 }
6218 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6219 }
6220 })
6221 }
6222
6223 pub fn diff_checkpoints(
6224 &mut self,
6225 base_checkpoint: GitRepositoryCheckpoint,
6226 target_checkpoint: GitRepositoryCheckpoint,
6227 ) -> oneshot::Receiver<Result<String>> {
6228 self.send_job(None, move |repo, _cx| async move {
6229 match repo {
6230 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6231 backend
6232 .diff_checkpoints(base_checkpoint, target_checkpoint)
6233 .await
6234 }
6235 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6236 }
6237 })
6238 }
6239
    /// Drops all pending git operations that are no longer running, keeping
    /// only the per-path entries that still have in-flight ops.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                // Retain only the ops that are still running for this path;
                // paths with no remaining ops are dropped entirely.
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event payload is the *previous* set of
            // pending ops (cloned before the assignment below), not the
            // filtered set — confirm listeners expect the pre-clear state.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6265
    /// Schedules a full git status re-scan on the job queue.
    ///
    /// Keyed on `GitJobKey::ReloadGitState`, so when several scans are queued
    /// only the most recent one actually runs. The freshly computed snapshot
    /// is applied to this entity and, when `updates_tx` is provided,
    /// forwarded to downstream collaborators.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans can only run against a local repository backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                // A full scan supersedes any queued incremental path updates.
                let compute_snapshot = this.update(&mut cx, |this, _| {
                    this.paths_needing_status_update.clear();
                    compute_snapshot(
                        this.id,
                        this.work_directory_abs_path.clone(),
                        this.snapshot.clone(),
                        backend.clone(),
                    )
                });
                // The snapshot computation itself runs on the background
                // executor.
                let (snapshot, events) = cx.background_spawn(compute_snapshot).await?;
                this.update(&mut cx, |this, cx| {
                    this.snapshot = snapshot.clone();
                    this.clear_pending_ops(cx);
                    for event in events {
                        cx.emit(event);
                    }
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6310
    /// Spawns the job-processing loop for a local repository and returns the
    /// sender used to enqueue `GitJob`s onto it.
    ///
    /// Jobs run one at a time, in FIFO order. Before each job runs, newly
    /// arrived jobs are drained into the queue; a keyed job is skipped when a
    /// later job with the same key is already queued, so only the most recent
    /// job per key executes.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything already queued, without blocking.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Coalesce keyed jobs: drop this one when a newer job
                    // with the same key is waiting behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed — no more jobs will arrive.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6356
    /// Spawns the job-processing loop for a remote (RPC-backed) repository
    /// and returns the sender used to enqueue `GitJob`s onto it.
    ///
    /// Same scheduling as the local worker: FIFO, one job at a time, with
    /// keyed jobs coalesced so only the most recent job per key executes.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything already queued, without blocking.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Coalesce keyed jobs: drop this one when a newer job
                    // with the same key is waiting behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed — no more jobs will arrive.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6392
6393 fn load_staged_text(
6394 &mut self,
6395 buffer_id: BufferId,
6396 repo_path: RepoPath,
6397 cx: &App,
6398 ) -> Task<Result<Option<String>>> {
6399 let rx = self.send_job(None, move |state, _| async move {
6400 match state {
6401 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6402 anyhow::Ok(backend.load_index_text(repo_path).await)
6403 }
6404 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6405 let response = client
6406 .request(proto::OpenUnstagedDiff {
6407 project_id: project_id.to_proto(),
6408 buffer_id: buffer_id.to_proto(),
6409 })
6410 .await?;
6411 Ok(response.staged_text)
6412 }
6413 }
6414 });
6415 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6416 }
6417
    /// Loads both diff bases (HEAD text and index text) for `repo_path`,
    /// collapsing them into a single value when the index matches HEAD so the
    /// caller can share one base between the staged and uncommitted diffs.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // When index == HEAD a single shared base suffices.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The host reports whether index and HEAD coincide.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6463
6464 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6465 let repository_id = self.snapshot.id;
6466 let rx = self.send_job(None, move |state, _| async move {
6467 match state {
6468 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6469 backend.load_blob_content(oid).await
6470 }
6471 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6472 let response = client
6473 .request(proto::GetBlobContent {
6474 project_id: project_id.to_proto(),
6475 repository_id: repository_id.0,
6476 oid: oid.to_string(),
6477 })
6478 .await?;
6479 Ok(response.content)
6480 }
6481 }
6482 });
6483 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6484 }
6485
    /// Queues the given paths for a git status refresh and schedules a
    /// `RefreshStatuses` job that recomputes their statuses, diff stats, and
    /// the stash list, updating the snapshot and notifying observers.
    ///
    /// NOTE(review): the job uses the `GitJobKey::RefreshStatuses` key —
    /// presumably so concurrent refresh requests are coalesced by
    /// `send_keyed_job`; confirm against its implementation.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        // Accumulate paths; the job drains this queue when it actually runs.
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Snapshot the previous state and take ownership of all paths
                // queued so far (possibly from several `paths_changed` calls).
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                // Status refreshes are only computed for local repositories.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Deduplicate and sort the queued path batches.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        // Run status and diff-stat queries concurrently; with
                        // no HEAD commit there is nothing to diff against, so
                        // substitute an empty result.
                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Both `statuses.entries` and the cursor advance in
                        // path order, so a single forward pass suffices.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            // Skip entries whose status and diff stat are
                            // unchanged from the previous snapshot.
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths that were queued but returned no status entry
                        // no longer have a status; remove any stale entries.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    // Forward the updated snapshot to downstream clients.
                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6598
6599 /// currently running git command and when it started
6600 pub fn current_job(&self) -> Option<JobInfo> {
6601 self.active_jobs.values().next().cloned()
6602 }
6603
6604 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
6605 self.send_job(None, |_, _| async {})
6606 }
6607
    /// Runs `f`, recording a pending git operation for each of `paths` while
    /// it is in flight.
    ///
    /// A `Running` op is registered per path before the work starts; when `f`
    /// resolves, each op is marked `Finished`, `Skipped` (when `f` failed
    /// with a `Canceled` error, which is then swallowed), or `Error`.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        // Register a `Running` op per path before the async work begins.
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // Map the job outcome onto a terminal pending-op status.
            // Cancellation counts as a successful no-op (`Skipped`).
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    // Re-read the current op list for the path and update the
                    // status of the op created above, if it still exists.
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6647
6648 fn new_pending_ops_for_paths(
6649 &mut self,
6650 paths: Vec<RepoPath>,
6651 git_status: pending_op::GitStatus,
6652 ) -> Vec<(PendingOpId, RepoPath)> {
6653 let mut edits = Vec::with_capacity(paths.len());
6654 let mut ids = Vec::with_capacity(paths.len());
6655 for path in paths {
6656 let mut ops = self
6657 .pending_ops
6658 .get(&PathKey(path.as_ref().clone()), ())
6659 .cloned()
6660 .unwrap_or_else(|| PendingOps::new(&path));
6661 let id = ops.max_id() + 1;
6662 ops.ops.push(PendingOp {
6663 id,
6664 git_status,
6665 job_status: pending_op::JobStatus::Running,
6666 });
6667 edits.push(sum_tree::Edit::Insert(ops));
6668 ids.push((id, path));
6669 }
6670 self.pending_ops.edit(edits, ());
6671 ids
6672 }
6673 pub fn default_remote_url(&self) -> Option<String> {
6674 self.remote_upstream_url
6675 .clone()
6676 .or(self.remote_origin_url.clone())
6677 }
6678}
6679
6680fn get_permalink_in_rust_registry_src(
6681 provider_registry: Arc<GitHostingProviderRegistry>,
6682 path: PathBuf,
6683 selection: Range<u32>,
6684) -> Result<url::Url> {
6685 #[derive(Deserialize)]
6686 struct CargoVcsGit {
6687 sha1: String,
6688 }
6689
6690 #[derive(Deserialize)]
6691 struct CargoVcsInfo {
6692 git: CargoVcsGit,
6693 path_in_vcs: String,
6694 }
6695
6696 #[derive(Deserialize)]
6697 struct CargoPackage {
6698 repository: String,
6699 }
6700
6701 #[derive(Deserialize)]
6702 struct CargoToml {
6703 package: CargoPackage,
6704 }
6705
6706 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
6707 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
6708 Some((dir, json))
6709 }) else {
6710 bail!("No .cargo_vcs_info.json found in parent directories")
6711 };
6712 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
6713 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
6714 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
6715 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
6716 .context("parsing package.repository field of manifest")?;
6717 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
6718 let permalink = provider.build_permalink(
6719 remote,
6720 BuildPermalinkParams::new(
6721 &cargo_vcs_info.git.sha1,
6722 &RepoPath::from_rel_path(
6723 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6724 ),
6725 Some(selection),
6726 ),
6727 );
6728 Ok(permalink)
6729}
6730
6731fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6732 let Some(blame) = blame else {
6733 return proto::BlameBufferResponse {
6734 blame_response: None,
6735 };
6736 };
6737
6738 let entries = blame
6739 .entries
6740 .into_iter()
6741 .map(|entry| proto::BlameEntry {
6742 sha: entry.sha.as_bytes().into(),
6743 start_line: entry.range.start,
6744 end_line: entry.range.end,
6745 original_line_number: entry.original_line_number,
6746 author: entry.author,
6747 author_mail: entry.author_mail,
6748 author_time: entry.author_time,
6749 author_tz: entry.author_tz,
6750 committer: entry.committer_name,
6751 committer_mail: entry.committer_email,
6752 committer_time: entry.committer_time,
6753 committer_tz: entry.committer_tz,
6754 summary: entry.summary,
6755 previous: entry.previous,
6756 filename: entry.filename,
6757 })
6758 .collect::<Vec<_>>();
6759
6760 let messages = blame
6761 .messages
6762 .into_iter()
6763 .map(|(oid, message)| proto::CommitMessage {
6764 oid: oid.as_bytes().into(),
6765 message,
6766 })
6767 .collect::<Vec<_>>();
6768
6769 proto::BlameBufferResponse {
6770 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6771 }
6772}
6773
6774fn deserialize_blame_buffer_response(
6775 response: proto::BlameBufferResponse,
6776) -> Option<git::blame::Blame> {
6777 let response = response.blame_response?;
6778 let entries = response
6779 .entries
6780 .into_iter()
6781 .filter_map(|entry| {
6782 Some(git::blame::BlameEntry {
6783 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6784 range: entry.start_line..entry.end_line,
6785 original_line_number: entry.original_line_number,
6786 committer_name: entry.committer,
6787 committer_time: entry.committer_time,
6788 committer_tz: entry.committer_tz,
6789 committer_email: entry.committer_mail,
6790 author: entry.author,
6791 author_mail: entry.author_mail,
6792 author_time: entry.author_time,
6793 author_tz: entry.author_tz,
6794 summary: entry.summary,
6795 previous: entry.previous,
6796 filename: entry.filename,
6797 })
6798 })
6799 .collect::<Vec<_>>();
6800
6801 let messages = response
6802 .messages
6803 .into_iter()
6804 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6805 .collect::<HashMap<_, _>>();
6806
6807 Some(Blame { entries, messages })
6808}
6809
6810fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6811 proto::Branch {
6812 is_head: branch.is_head,
6813 ref_name: branch.ref_name.to_string(),
6814 unix_timestamp: branch
6815 .most_recent_commit
6816 .as_ref()
6817 .map(|commit| commit.commit_timestamp as u64),
6818 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6819 ref_name: upstream.ref_name.to_string(),
6820 tracking: upstream
6821 .tracking
6822 .status()
6823 .map(|upstream| proto::UpstreamTracking {
6824 ahead: upstream.ahead as u64,
6825 behind: upstream.behind as u64,
6826 }),
6827 }),
6828 most_recent_commit: branch
6829 .most_recent_commit
6830 .as_ref()
6831 .map(|commit| proto::CommitSummary {
6832 sha: commit.sha.to_string(),
6833 subject: commit.subject.to_string(),
6834 commit_timestamp: commit.commit_timestamp,
6835 author_name: commit.author_name.to_string(),
6836 }),
6837 }
6838}
6839
6840fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6841 proto::Worktree {
6842 path: worktree.path.to_string_lossy().to_string(),
6843 ref_name: worktree.ref_name.to_string(),
6844 sha: worktree.sha.to_string(),
6845 }
6846}
6847
6848fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6849 git::repository::Worktree {
6850 path: PathBuf::from(proto.path.clone()),
6851 ref_name: proto.ref_name.clone().into(),
6852 sha: proto.sha.clone().into(),
6853 }
6854}
6855
6856fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6857 git::repository::Branch {
6858 is_head: proto.is_head,
6859 ref_name: proto.ref_name.clone().into(),
6860 upstream: proto
6861 .upstream
6862 .as_ref()
6863 .map(|upstream| git::repository::Upstream {
6864 ref_name: upstream.ref_name.to_string().into(),
6865 tracking: upstream
6866 .tracking
6867 .as_ref()
6868 .map(|tracking| {
6869 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6870 ahead: tracking.ahead as u32,
6871 behind: tracking.behind as u32,
6872 })
6873 })
6874 .unwrap_or(git::repository::UpstreamTracking::Gone),
6875 }),
6876 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6877 git::repository::CommitSummary {
6878 sha: commit.sha.to_string().into(),
6879 subject: commit.subject.to_string().into(),
6880 commit_timestamp: commit.commit_timestamp,
6881 author_name: commit.author_name.to_string().into(),
6882 has_parent: true,
6883 }
6884 }),
6885 }
6886}
6887
6888fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6889 proto::GitCommitDetails {
6890 sha: commit.sha.to_string(),
6891 message: commit.message.to_string(),
6892 commit_timestamp: commit.commit_timestamp,
6893 author_email: commit.author_email.to_string(),
6894 author_name: commit.author_name.to_string(),
6895 }
6896}
6897
6898fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6899 CommitDetails {
6900 sha: proto.sha.clone().into(),
6901 message: proto.message.clone().into(),
6902 commit_timestamp: proto.commit_timestamp,
6903 author_email: proto.author_email.clone().into(),
6904 author_name: proto.author_name.clone().into(),
6905 }
6906}
6907
/// Rebuilds a complete `RepositorySnapshot` from the current on-disk state
/// of the repository, comparing it against `prev_snapshot` to determine
/// which `RepositoryEvent`s should be emitted alongside it.
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Useful when branch is None in detached head state
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    // Diff stats need a HEAD commit to diff against; in an empty repository
    // substitute an empty result rather than querying the backend.
    let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> = if head_commit.is_some() {
        backend.diff_stat(&[])
    } else {
        future::ready(Ok(status::GitDiffStat {
            entries: Arc::default(),
        }))
        .boxed()
    };
    // Run the full status scan (rooted at "."), diff stats, and worktree
    // listing concurrently.
    let (statuses, diff_stats, all_worktrees) = futures::future::try_join3(
        backend.status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )]),
        diff_stat_future,
        backend.worktrees(),
    )
    .await?;

    // Keep only linked worktrees, excluding the main working directory.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let stash_entries = backend.stash_entries().await?;
    let mut conflicted_paths = Vec::new();
    // Build the status tree, collecting conflicted paths along the way so
    // the merge details can be refreshed below.
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );
    let mut merge_details = prev_snapshot.merge;
    let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
    log::debug!("new merge details: {merge_details:?}");

    // Emit events only for the parts of the snapshot that actually changed
    // relative to the previous snapshot.
    if conflicts_changed || statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged)
    }

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    if *linked_worktrees != *prev_snapshot.linked_worktrees {
        events.push(RepositoryEvent::GitWorktreeListChanged);
    }

    let remote_origin_url = backend.remote_url("origin").await;
    let remote_upstream_url = backend.remote_url("upstream").await;

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        original_repo_abs_path: prev_snapshot.original_repo_abs_path,
        path_style: prev_snapshot.path_style,
        // Every recomputation bumps the scan id.
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
        linked_worktrees,
    };

    Ok((snapshot, events))
}
7000
/// Decodes a `FileStatus` from its protobuf representation.
///
/// When the structured `status` variant is absent, falls back to decoding
/// the single numeric `simple_status` code — presumably for compatibility
/// with peers that only send the simple form; TODO confirm against the
/// protocol's history.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        // Fallback path: map the bare status code onto a full status,
        // treating the index side as unmodified.
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both heads with the same mapping; `map` yields an array
            // of Results that is unpacked (and `?`-propagated) below.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Same array-of-Results technique for the index/worktree pair.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7086
7087fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7088 use proto::git_file_status::{Tracked, Unmerged, Variant};
7089
7090 let variant = match status {
7091 FileStatus::Untracked => Variant::Untracked(Default::default()),
7092 FileStatus::Ignored => Variant::Ignored(Default::default()),
7093 FileStatus::Unmerged(UnmergedStatus {
7094 first_head,
7095 second_head,
7096 }) => Variant::Unmerged(Unmerged {
7097 first_head: unmerged_status_to_proto(first_head),
7098 second_head: unmerged_status_to_proto(second_head),
7099 }),
7100 FileStatus::Tracked(TrackedStatus {
7101 index_status,
7102 worktree_status,
7103 }) => Variant::Tracked(Tracked {
7104 index_status: tracked_status_to_proto(index_status),
7105 worktree_status: tracked_status_to_proto(worktree_status),
7106 }),
7107 };
7108 proto::GitFileStatus {
7109 variant: Some(variant),
7110 }
7111}
7112
7113fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7114 match code {
7115 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7116 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7117 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7118 }
7119}
7120
7121fn tracked_status_to_proto(code: StatusCode) -> i32 {
7122 match code {
7123 StatusCode::Added => proto::GitStatus::Added as _,
7124 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7125 StatusCode::Modified => proto::GitStatus::Modified as _,
7126 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7127 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7128 StatusCode::Copied => proto::GitStatus::Copied as _,
7129 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7130 }
7131}