1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for git state in a project: tracks the repositories
/// discovered in the project's worktrees, per-buffer diff/conflict state, and
/// replication of git state to and from collaborators.
pub struct GitStore {
    /// Whether this store is the local authority or a remote mirror, plus the
    /// associated connection state.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All known repositories, keyed by their stable id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// Worktree ids associated with each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository currently treated as active, if any.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff-loading tasks, deduplicated per (buffer, diff kind) so
    /// concurrent callers share one load.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Strong diff handles retained on behalf of each remote peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Strong handles to the diffs shared with a single peer for one buffer,
/// keeping them alive while the peer needs them.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: weak handles to the diffs and conflict set opened for
/// a buffer, plus the cached base texts used to recompute them.
struct BufferGitState {
    /// Diff of the buffer against the index (staged) text.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    /// Diff of the buffer against the committed (HEAD) text.
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs against specific commits; a `None` key is a diff with no base
    /// text (see `open_diff_since`).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders notified when the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    /// Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    /// Cached HEAD text for the buffer's file, if loaded.
    head_text: Option<Arc<str>>,
    /// Cached index (staged) text for the buffer's file, if loaded.
    index_text: Option<Arc<str>>,
    /// Cached blob contents keyed by commit oid, for diffs against commits.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed and their new
/// contents (`None` when there is no base text).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index text changed.
    SetIndex(Option<String>),
    /// Only the HEAD text changed.
    SetHead(Option<String>),
    /// Both changed, to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Both changed, to the same value.
    SetBoth(Option<String>),
}
155
/// Identifies which kind of diff is being loaded or cached for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer vs. the index.
    Unstaged,
    /// Buffer vs. HEAD.
    Uncommitted,
    /// Buffer vs. a specific commit (`None` for an empty base).
    SinceOid(Option<git::Oid>),
}
162
/// Distinguishes a store that owns local repositories from one mirroring a
/// remote project's repositories over RPC.
enum GitStoreState {
    Local {
        /// Source of fresh `RepositoryId`s for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this local project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        /// Connection to the upstream host that owns the repositories.
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this remote project is re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// A unit of repository state queued for forwarding to downstream
/// collaborators (see `GitStore::shared`).
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
181
/// State held while a local project is shared: the downstream client and the
/// background task that serializes and forwards repository updates.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    /// Queue of snapshot updates / removals consumed by `_task`.
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// A checkpoint of every repository in the store, keyed by each repository's
/// working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// The git status of a single path within a repository.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    /// Added/deleted line counts for the path, when available.
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
/// Lets status entries live in a `SumTree`, summarized by path and aggregate
/// git status.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}
254
/// Keys status entries by repository-relative path for ordered lookup in the
/// `SumTree`.
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Stable numeric identifier for a repository within a `GitStore`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
265
/// Details of an in-progress merge: the merge heads recorded per conflicted
/// path, and the merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}
271
/// Cache entry for a commit's graph data: either still being fetched or fully
/// loaded.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// An immutable, cloneable view of a repository's state. Snapshots are diffed
/// against each other to produce incremental updates for collaborators (see
/// `GitStore::shared`).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Per-path git status entries, ordered by repo-relative path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    /// The current branch, if any.
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    // NOTE(review): presumably bumped whenever statuses are rescanned —
    // confirm against the scanning code.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Linked git worktrees of this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
298
299type JobId = u64;
300
/// Metadata about a running git job: when it started and a user-facing
/// description.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
306
/// Background handler that serves commit-data requests for the commit graph:
/// requested oids are sent over the channel and processed by `_task`.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}
311
/// Lifecycle of the commit-data handler: not yet running, running, or shut
/// down.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
317
/// Commit-graph data being fetched for one (log source, log order) pair,
/// together with the task producing it and any fetch error.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index of each commit in `commit_data`, keyed by its oid.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
324
/// Borrowed view of the commit-graph data loaded so far, plus loading/error
/// state for the caller to display.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// Live handle to a single git repository: combines the latest
/// `RepositorySnapshot` (via `Deref`) with mutable bookkeeping for jobs,
/// askpass sessions, and commit-graph data.
pub struct Repository {
    /// Weak self-handle for use inside spawned tasks.
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    /// Buffer backing the commit message editor, if one has been opened.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue feeding jobs to this repository's job-processing task.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Jobs currently running, keyed by id.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    /// Counter used to allocate `JobId`s.
    job_id: JobId,
    /// Askpass delegates for in-flight credential prompts, keyed by session id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Lazily resolved backend state (local git backend or remote client).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    /// Commit-graph data fetched per (log source, log order) combination.
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    /// Cached per-commit graph data, including entries still loading.
    commit_data: HashMap<Oid, CommitDataState>,
}
350
/// Exposes the snapshot's fields and methods directly on `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
358
/// Everything needed to operate on a repository on the local machine: the
/// filesystem, the git backend, and the resolved shell environment.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    /// Environment variables resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
365
impl LocalRepositoryState {
    /// Builds the local repository state: resolves the shell environment for
    /// the working directory, locates a `git` binary on that environment's
    /// `PATH` (falling back to the ambient `PATH`), and opens the repository
    /// backend at `dot_git_abs_path`.
    ///
    /// If the directory environment cannot be resolved, logs an error and
    /// continues with an empty environment rather than failing.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found via the resolved environment's PATH;
                    // otherwise fall back to whatever is on the process PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
408
/// Connection details used to operate on a repository that lives on a remote
/// host, via RPC.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
414
/// Backend for a repository: a local git backend, or a client connection to a
/// remote host.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
420
/// Progress events for loading commit-graph data.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of loaded commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
427
/// Events emitted by a `Repository` when aspects of its state change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// Commit-graph loading progress for a (log source, log order) pair.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
437
/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
440
/// Aggregate events emitted by the `GitStore` itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// Writing the index text to disk failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
452
// Repositories emit both state-change events and job notifications; the store
// emits its own aggregate events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
456
/// A queued unit of work to run against a repository's backend state.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): the key appears to identify equivalent jobs (e.g. for
    // coalescing duplicates in the queue) — confirm against the job-dispatch
    // logic, which is outside this view.
    key: Option<GitJobKey>,
}
461
/// Identifies a class of git job, compared for equality when queuing.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
471 pub fn local(
472 worktree_store: &Entity<WorktreeStore>,
473 buffer_store: Entity<BufferStore>,
474 environment: Entity<ProjectEnvironment>,
475 fs: Arc<dyn Fs>,
476 cx: &mut Context<Self>,
477 ) -> Self {
478 Self::new(
479 worktree_store.clone(),
480 buffer_store,
481 GitStoreState::Local {
482 next_repository_id: Arc::new(AtomicU64::new(1)),
483 downstream: None,
484 project_environment: environment,
485 fs,
486 },
487 cx,
488 )
489 }
490
491 pub fn remote(
492 worktree_store: &Entity<WorktreeStore>,
493 buffer_store: Entity<BufferStore>,
494 upstream_client: AnyProtoClient,
495 project_id: u64,
496 cx: &mut Context<Self>,
497 ) -> Self {
498 Self::new(
499 worktree_store.clone(),
500 buffer_store,
501 GitStoreState::Remote {
502 upstream_client,
503 upstream_project_id: project_id,
504 downstream: None,
505 },
506 cx,
507 )
508 }
509
510 fn new(
511 worktree_store: Entity<WorktreeStore>,
512 buffer_store: Entity<BufferStore>,
513 state: GitStoreState,
514 cx: &mut Context<Self>,
515 ) -> Self {
516 let mut _subscriptions = vec![
517 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
518 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
519 ];
520
521 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
522 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
523 }
524
525 GitStore {
526 state,
527 buffer_store,
528 worktree_store,
529 repositories: HashMap::default(),
530 worktree_ids: HashMap::default(),
531 active_repo_id: None,
532 _subscriptions,
533 loading_diffs: HashMap::default(),
534 shared_diffs: HashMap::default(),
535 diffs: HashMap::default(),
536 }
537 }
538
    /// Registers all git-related RPC message handlers on the given client.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
586
587 pub fn is_local(&self) -> bool {
588 matches!(self.state, GitStoreState::Local { .. })
589 }
590 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
591 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
592 let id = repo.read(cx).id;
593 if self.active_repo_id != Some(id) {
594 self.active_repo_id = Some(id);
595 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
596 }
597 }
598 }
599
    /// Begins sharing this store's repositories with downstream collaborators.
    ///
    /// For a remote store, immediately forwards a full initial snapshot of
    /// every repository and records the downstream client. For a local store,
    /// sets up an unbounded channel plus a background task that diffs
    /// successive `RepositorySnapshot`s and forwards incremental updates.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // Last snapshot sent per repository, so the background task
                // can send minimal diffs thereafter.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Queue an initial full snapshot for every known repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Already-known repository: send an
                                            // incremental diff against the last
                                            // snapshot we sent.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send a full
                                            // initial snapshot.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // When the channel closes or a send fails, clear the
                        // downstream state so the project stops being shared.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
680
681 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
682 match &mut self.state {
683 GitStoreState::Local {
684 downstream: downstream_client,
685 ..
686 } => {
687 downstream_client.take();
688 }
689 GitStoreState::Remote {
690 downstream: downstream_client,
691 ..
692 } => {
693 downstream_client.take();
694 }
695 }
696 self.shared_diffs.clear();
697 }
698
    /// Drops all diff handles previously shared with the given peer.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
702
703 pub fn active_repository(&self) -> Option<Entity<Repository>> {
704 self.active_repo_id
705 .as_ref()
706 .map(|id| self.repositories[id].clone())
707 }
708
709 pub fn open_unstaged_diff(
710 &mut self,
711 buffer: Entity<Buffer>,
712 cx: &mut Context<Self>,
713 ) -> Task<Result<Entity<BufferDiff>>> {
714 let buffer_id = buffer.read(cx).remote_id();
715 if let Some(diff_state) = self.diffs.get(&buffer_id)
716 && let Some(unstaged_diff) = diff_state
717 .read(cx)
718 .unstaged_diff
719 .as_ref()
720 .and_then(|weak| weak.upgrade())
721 {
722 if let Some(task) =
723 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
724 {
725 return cx.background_executor().spawn(async move {
726 task.await;
727 Ok(unstaged_diff)
728 });
729 }
730 return Task::ready(Ok(unstaged_diff));
731 }
732
733 let Some((repo, repo_path)) =
734 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
735 else {
736 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
737 };
738
739 let task = self
740 .loading_diffs
741 .entry((buffer_id, DiffKind::Unstaged))
742 .or_insert_with(|| {
743 let staged_text = repo.update(cx, |repo, cx| {
744 repo.load_staged_text(buffer_id, repo_path, cx)
745 });
746 cx.spawn(async move |this, cx| {
747 Self::open_diff_internal(
748 this,
749 DiffKind::Unstaged,
750 staged_text.await.map(DiffBasesChange::SetIndex),
751 buffer,
752 cx,
753 )
754 .await
755 .map_err(Arc::new)
756 })
757 .shared()
758 })
759 .clone();
760
761 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
762 }
763
    /// Returns the diff between `buffer`'s contents and its file's content at
    /// the given commit (`None` for an empty base), creating and caching the
    /// diff on first use.
    ///
    /// Reuses a live cached diff when one exists (waiting for any in-flight
    /// recalculation first), and deduplicates concurrent loads of the same
    /// diff via `loading_diffs`. The resulting diff's secondary diff is set to
    /// the buffer's unstaged diff.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff for this oid already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load for this (buffer, oid) is already running; share it.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository; `None` oid means
                    // there is no base content.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff acts as the secondary diff for
                    // staged-vs-unstaged hunk classification.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and a weak handle to the diff so
                        // subsequent calls can reuse them.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
864
865 #[ztracing::instrument(skip_all)]
866 pub fn open_uncommitted_diff(
867 &mut self,
868 buffer: Entity<Buffer>,
869 cx: &mut Context<Self>,
870 ) -> Task<Result<Entity<BufferDiff>>> {
871 let buffer_id = buffer.read(cx).remote_id();
872
873 if let Some(diff_state) = self.diffs.get(&buffer_id)
874 && let Some(uncommitted_diff) = diff_state
875 .read(cx)
876 .uncommitted_diff
877 .as_ref()
878 .and_then(|weak| weak.upgrade())
879 {
880 if let Some(task) =
881 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
882 {
883 return cx.background_executor().spawn(async move {
884 task.await;
885 Ok(uncommitted_diff)
886 });
887 }
888 return Task::ready(Ok(uncommitted_diff));
889 }
890
891 let Some((repo, repo_path)) =
892 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
893 else {
894 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
895 };
896
897 let task = self
898 .loading_diffs
899 .entry((buffer_id, DiffKind::Uncommitted))
900 .or_insert_with(|| {
901 let changes = repo.update(cx, |repo, cx| {
902 repo.load_committed_text(buffer_id, repo_path, cx)
903 });
904
905 // todo(lw): hot foreground spawn
906 cx.spawn(async move |this, cx| {
907 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
908 .await
909 .map_err(Arc::new)
910 })
911 .shared()
912 })
913 .clone();
914
915 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
916 }
917
    /// Shared tail of `open_unstaged_diff` and `open_uncommitted_diff`:
    /// installs the loaded base texts into the buffer's `BufferGitState`,
    /// creates the `BufferDiff`, and waits for the initial recalculation.
    ///
    /// On error, removes the entry from `loading_diffs` so a later call can
    /// retry. Not used for `DiffKind::SinceOid` (see `open_diff_since`).
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the in-flight marker before propagating the error so
                // the load can be retried.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
992
993 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
994 let diff_state = self.diffs.get(&buffer_id)?;
995 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
996 }
997
998 pub fn get_uncommitted_diff(
999 &self,
1000 buffer_id: BufferId,
1001 cx: &App,
1002 ) -> Option<Entity<BufferDiff>> {
1003 let diff_state = self.diffs.get(&buffer_id)?;
1004 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1005 }
1006
1007 pub fn get_diff_since_oid(
1008 &self,
1009 buffer_id: BufferId,
1010 oid: Option<git::Oid>,
1011 cx: &App,
1012 ) -> Option<Entity<BufferDiff>> {
1013 let diff_state = self.diffs.get(&buffer_id)?;
1014 diff_state.read(cx).oid_diff(oid)
1015 }
1016
1017 pub fn open_conflict_set(
1018 &mut self,
1019 buffer: Entity<Buffer>,
1020 cx: &mut Context<Self>,
1021 ) -> Entity<ConflictSet> {
1022 log::debug!("open conflict set");
1023 let buffer_id = buffer.read(cx).remote_id();
1024
1025 if let Some(git_state) = self.diffs.get(&buffer_id)
1026 && let Some(conflict_set) = git_state
1027 .read(cx)
1028 .conflict_set
1029 .as_ref()
1030 .and_then(|weak| weak.upgrade())
1031 {
1032 let conflict_set = conflict_set;
1033 let buffer_snapshot = buffer.read(cx).text_snapshot();
1034
1035 git_state.update(cx, |state, cx| {
1036 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1037 });
1038
1039 return conflict_set;
1040 }
1041
1042 let is_unmerged = self
1043 .repository_and_path_for_buffer_id(buffer_id, cx)
1044 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1045 let git_store = cx.weak_entity();
1046 let buffer_git_state = self
1047 .diffs
1048 .entry(buffer_id)
1049 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1050 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1051
1052 self._subscriptions
1053 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1054 cx.emit(GitStoreEvent::ConflictsUpdated);
1055 }));
1056
1057 buffer_git_state.update(cx, |state, cx| {
1058 state.conflict_set = Some(conflict_set.downgrade());
1059 let buffer_snapshot = buffer.read(cx).text_snapshot();
1060 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1061 });
1062
1063 conflict_set
1064 }
1065
1066 pub fn project_path_git_status(
1067 &self,
1068 project_path: &ProjectPath,
1069 cx: &App,
1070 ) -> Option<FileStatus> {
1071 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1072 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1073 }
1074
    /// Creates a checkpoint of every repository in the store.
    ///
    /// Returns a [`GitStoreCheckpoint`] mapping each repository's work
    /// directory to its individual checkpoint. Fails if any single repository
    /// checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `?` inside the map closure flattens the nested result of the
                // checkpoint future.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            // Fail fast if any repository's checkpoint fails.
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1095
    /// Restores each repository to the state captured in `checkpoint`.
    ///
    /// Repositories are matched by work-directory path; checkpoints whose
    /// directory no longer corresponds to a tracked repository are silently
    /// skipped. Fails if any individual restore fails.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // The inner `?` flattens the nested result from the restore future.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1121
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// If `right` is missing an entry for any work directory present in
    /// `left`, the checkpoints are immediately reported unequal. Entries whose
    /// repository is no longer tracked are skipped (treated as equal).
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    // The inner `?` flattens the nested result from the compare future.
                    tasks.push(async move { compare.await? });
                }
            } else {
                // A work directory present only on the left: definitely unequal.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            // Equal only if every per-repository comparison reported equality.
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1160
    /// Blames a buffer.
    ///
    /// When `version` is given, blames the buffer's content at that version;
    /// otherwise blames the current content. Local repositories run the blame
    /// via the git backend; remote ones issue a `BlameBuffer` RPC.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle across the await so the repository entity
        // can be released while the blame is in flight.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1207
1208 pub fn file_history(
1209 &self,
1210 repo: &Entity<Repository>,
1211 path: RepoPath,
1212 cx: &mut App,
1213 ) -> Task<Result<git::repository::FileHistory>> {
1214 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1215
1216 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1217 }
1218
1219 pub fn file_history_paginated(
1220 &self,
1221 repo: &Entity<Repository>,
1222 path: RepoPath,
1223 skip: usize,
1224 limit: Option<usize>,
1225 cx: &mut App,
1226 ) -> Task<Result<git::repository::FileHistory>> {
1227 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1228
1229 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1230 }
1231
    /// Builds a permalink URL to `selection` in `buffer` on its git hosting
    /// provider, using the current HEAD SHA and the branch's upstream remote
    /// (falling back to "origin").
    ///
    /// Buffers outside any repository get one special case: Rust sources in
    /// the Cargo registry are linked via crate metadata instead.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; default to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Delegate to the host, which has real repository access.
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1316
1317 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1318 match &self.state {
1319 GitStoreState::Local {
1320 downstream: downstream_client,
1321 ..
1322 } => downstream_client
1323 .as_ref()
1324 .map(|state| (state.client.clone(), state.project_id)),
1325 GitStoreState::Remote {
1326 downstream: downstream_client,
1327 ..
1328 } => downstream_client.clone(),
1329 }
1330 }
1331
1332 fn upstream_client(&self) -> Option<AnyProtoClient> {
1333 match &self.state {
1334 GitStoreState::Local { .. } => None,
1335 GitStoreState::Remote {
1336 upstream_client, ..
1337 } => Some(upstream_client.clone()),
1338 }
1339 }
1340
    /// Routes worktree-store events into git bookkeeping: entry updates
    /// trigger per-repository path notifications, repository-set changes
    /// add/update/remove repositories, and worktree removal drops repositories
    /// that no longer belong to any worktree.
    ///
    /// Only meaningful for local stores; remote stores return immediately.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by owning repository, then
                    // notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees don't contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Collect repositories no longer referenced by any worktree.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository went away, fall back to any remaining one.
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Responds to a repository's change event by refreshing conflict tracking
    /// for every open buffer belonging to that repository, then re-emitting
    /// the event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only buffers that resolve to this repository are affected.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Re-scan for conflict markers only when the unmerged
                        // status actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1486
    /// Forwards a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1490
1491 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1492 fn update_repositories_from_worktree(
1493 &mut self,
1494 worktree_id: WorktreeId,
1495 project_environment: Entity<ProjectEnvironment>,
1496 next_repository_id: Arc<AtomicU64>,
1497 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1498 updated_git_repositories: UpdatedGitRepositoriesSet,
1499 fs: Arc<dyn Fs>,
1500 cx: &mut Context<Self>,
1501 ) {
1502 let mut removed_ids = Vec::new();
1503 for update in updated_git_repositories.iter() {
1504 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1505 let existing_work_directory_abs_path =
1506 repo.read(cx).work_directory_abs_path.clone();
1507 Some(&existing_work_directory_abs_path)
1508 == update.old_work_directory_abs_path.as_ref()
1509 || Some(&existing_work_directory_abs_path)
1510 == update.new_work_directory_abs_path.as_ref()
1511 }) {
1512 let repo_id = *id;
1513 if let Some(new_work_directory_abs_path) =
1514 update.new_work_directory_abs_path.clone()
1515 {
1516 self.worktree_ids
1517 .entry(repo_id)
1518 .or_insert_with(HashSet::new)
1519 .insert(worktree_id);
1520 existing.update(cx, |existing, cx| {
1521 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1522 existing.schedule_scan(updates_tx.clone(), cx);
1523 });
1524 } else {
1525 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1526 worktree_ids.remove(&worktree_id);
1527 if worktree_ids.is_empty() {
1528 removed_ids.push(repo_id);
1529 }
1530 }
1531 }
1532 } else if let UpdatedGitRepository {
1533 new_work_directory_abs_path: Some(work_directory_abs_path),
1534 dot_git_abs_path: Some(dot_git_abs_path),
1535 repository_dir_abs_path: Some(_repository_dir_abs_path),
1536 common_dir_abs_path: Some(common_dir_abs_path),
1537 ..
1538 } = update
1539 {
1540 let original_repo_abs_path: Arc<Path> =
1541 git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
1542 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1543 let is_trusted = TrustedWorktrees::try_get_global(cx)
1544 .map(|trusted_worktrees| {
1545 trusted_worktrees.update(cx, |trusted_worktrees, cx| {
1546 trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
1547 })
1548 })
1549 .unwrap_or(false);
1550 let git_store = cx.weak_entity();
1551 let repo = cx.new(|cx| {
1552 let mut repo = Repository::local(
1553 id,
1554 work_directory_abs_path.clone(),
1555 original_repo_abs_path.clone(),
1556 dot_git_abs_path.clone(),
1557 project_environment.downgrade(),
1558 fs.clone(),
1559 is_trusted,
1560 git_store,
1561 cx,
1562 );
1563 if let Some(updates_tx) = updates_tx.as_ref() {
1564 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1565 updates_tx
1566 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1567 .ok();
1568 }
1569 repo.schedule_scan(updates_tx.clone(), cx);
1570 repo
1571 });
1572 self._subscriptions
1573 .push(cx.subscribe(&repo, Self::on_repository_event));
1574 self._subscriptions
1575 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1576 self.repositories.insert(id, repo);
1577 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1578 cx.emit(GitStoreEvent::RepositoryAdded);
1579 self.active_repo_id.get_or_insert_with(|| {
1580 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1581 id
1582 });
1583 }
1584 }
1585
1586 for id in removed_ids {
1587 if self.active_repo_id == Some(id) {
1588 self.active_repo_id = None;
1589 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1590 }
1591 self.repositories.remove(&id);
1592 if let Some(updates_tx) = updates_tx.as_ref() {
1593 updates_tx
1594 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1595 .ok();
1596 }
1597 }
1598 }
1599
1600 fn on_trusted_worktrees_event(
1601 &mut self,
1602 _: Entity<TrustedWorktreesStore>,
1603 event: &TrustedWorktreesEvent,
1604 cx: &mut Context<Self>,
1605 ) {
1606 if !matches!(self.state, GitStoreState::Local { .. }) {
1607 return;
1608 }
1609
1610 let (is_trusted, event_paths) = match event {
1611 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1612 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1613 };
1614
1615 for (repo_id, worktree_ids) in &self.worktree_ids {
1616 if worktree_ids
1617 .iter()
1618 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1619 {
1620 if let Some(repo) = self.repositories.get(repo_id) {
1621 let repository_state = repo.read(cx).repository_state.clone();
1622 cx.background_spawn(async move {
1623 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1624 state.backend.set_trusted(is_trusted);
1625 }
1626 })
1627 .detach();
1628 }
1629 }
1630 }
1631 }
1632
    /// Keeps per-buffer git state in sync with buffer lifecycle events from
    /// the buffer store.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Propagate language changes to any existing diff state for
                // this buffer.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop all diff state for the buffer, including any diffs
                // shared with remote peers.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Reload the committed text for the new path and feed it
                    // to the diff state; failures are logged, not surfaced.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1705
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all of that work
    /// has completed. Buffers without existing git state are skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` may return None; only wait when
                    // a recalculation is actually pending.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1731
    /// When hunks are staged or unstaged through a diff, writes the new index
    /// text back to the repository; on failure, rolls back the optimistic
    /// pending-hunk state and surfaces the error as a store event.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // NOTE(review): this counter appears to order/deduplicate
                // concurrent index writes — confirm in `spawn_set_index_text_job`.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // The index write failed: clear pending hunks and emit
                        // the error so the UI can react.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1771
1772 fn local_worktree_git_repos_changed(
1773 &mut self,
1774 worktree: Entity<Worktree>,
1775 changed_repos: &UpdatedGitRepositoriesSet,
1776 cx: &mut Context<Self>,
1777 ) {
1778 log::debug!("local worktree repos changed");
1779 debug_assert!(worktree.read(cx).is_local());
1780
1781 for repository in self.repositories.values() {
1782 repository.update(cx, |repository, cx| {
1783 let repo_abs_path = &repository.work_directory_abs_path;
1784 if changed_repos.iter().any(|update| {
1785 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1786 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1787 }) {
1788 repository.reload_buffer_diff_bases(cx);
1789 }
1790 });
1791 }
1792 }
1793
    /// All tracked repositories, keyed by their store-assigned id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1797
1798 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1799 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1800 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1801 Some(status.status)
1802 }
1803
1804 pub fn repository_and_path_for_buffer_id(
1805 &self,
1806 buffer_id: BufferId,
1807 cx: &App,
1808 ) -> Option<(Entity<Repository>, RepoPath)> {
1809 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1810 let project_path = buffer.read(cx).project_path(cx)?;
1811 self.repository_and_path_for_project_path(&project_path, cx)
1812 }
1813
    /// Resolves the repository and repo-relative path for a project path.
    ///
    /// When several repositories contain the path (nested repositories), the
    /// one whose work-directory path compares greatest wins — i.e. the most
    /// deeply nested candidate, since all candidates' work directories are
    /// prefixes of the same absolute path.
    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }
1828
1829 pub fn git_init(
1830 &self,
1831 path: Arc<Path>,
1832 fallback_branch_name: String,
1833 cx: &App,
1834 ) -> Task<Result<()>> {
1835 match &self.state {
1836 GitStoreState::Local { fs, .. } => {
1837 let fs = fs.clone();
1838 cx.background_executor()
1839 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1840 }
1841 GitStoreState::Remote {
1842 upstream_client,
1843 upstream_project_id: project_id,
1844 ..
1845 } => {
1846 let client = upstream_client.clone();
1847 let project_id = *project_id;
1848 cx.background_executor().spawn(async move {
1849 client
1850 .request(proto::GitInit {
1851 project_id: project_id,
1852 abs_path: path.to_string_lossy().into_owned(),
1853 fallback_branch_name,
1854 })
1855 .await?;
1856 Ok(())
1857 })
1858 }
1859 }
1860 }
1861
1862 pub fn git_clone(
1863 &self,
1864 repo: String,
1865 path: impl Into<Arc<std::path::Path>>,
1866 cx: &App,
1867 ) -> Task<Result<()>> {
1868 let path = path.into();
1869 match &self.state {
1870 GitStoreState::Local { fs, .. } => {
1871 let fs = fs.clone();
1872 cx.background_executor()
1873 .spawn(async move { fs.git_clone(&repo, &path).await })
1874 }
1875 GitStoreState::Remote {
1876 upstream_client,
1877 upstream_project_id,
1878 ..
1879 } => {
1880 if upstream_client.is_via_collab() {
1881 return Task::ready(Err(anyhow!(
1882 "Git Clone isn't supported for project guests"
1883 )));
1884 }
1885 let request = upstream_client.request(proto::GitClone {
1886 project_id: *upstream_project_id,
1887 abs_path: path.to_string_lossy().into_owned(),
1888 remote_repo: repo,
1889 });
1890
1891 cx.background_spawn(async move {
1892 let result = request.await?;
1893
1894 match result.success {
1895 true => Ok(()),
1896 false => Err(anyhow!("Git Clone failed")),
1897 }
1898 })
1899 }
1900 }
1901 }
1902
    /// Handles a host-sent repository snapshot update: creates the repository
    /// entity on first sight, applies the update, and re-forwards it to any
    /// downstream clients.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // The subscription can't be pushed inside the `or_insert_with`
            // closure (that would re-borrow `this`), so stash it and extend
            // `_subscriptions` afterwards.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository we hear about becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward to downstream clients, rewriting the project id to ours.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1958
1959 async fn handle_remove_repository(
1960 this: Entity<Self>,
1961 envelope: TypedEnvelope<proto::RemoveRepository>,
1962 mut cx: AsyncApp,
1963 ) -> Result<()> {
1964 this.update(&mut cx, |this, cx| {
1965 let mut update = envelope.payload;
1966 let id = RepositoryId::from_proto(update.id);
1967 this.repositories.remove(&id);
1968 if let Some((client, project_id)) = this.downstream_client() {
1969 update.project_id = project_id.to_proto();
1970 client.send(update).log_err();
1971 }
1972 if this.active_repo_id == Some(id) {
1973 this.active_repo_id = None;
1974 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1975 }
1976 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1977 });
1978 Ok(())
1979 }
1980
1981 async fn handle_git_init(
1982 this: Entity<Self>,
1983 envelope: TypedEnvelope<proto::GitInit>,
1984 cx: AsyncApp,
1985 ) -> Result<proto::Ack> {
1986 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1987 let name = envelope.payload.fallback_branch_name;
1988 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1989 .await?;
1990
1991 Ok(proto::Ack {})
1992 }
1993
1994 async fn handle_git_clone(
1995 this: Entity<Self>,
1996 envelope: TypedEnvelope<proto::GitClone>,
1997 cx: AsyncApp,
1998 ) -> Result<proto::GitCloneResponse> {
1999 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2000 let repo_name = envelope.payload.remote_repo;
2001 let result = cx
2002 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2003 .await;
2004
2005 Ok(proto::GitCloneResponse {
2006 success: result.is_ok(),
2007 })
2008 }
2009
2010 async fn handle_fetch(
2011 this: Entity<Self>,
2012 envelope: TypedEnvelope<proto::Fetch>,
2013 mut cx: AsyncApp,
2014 ) -> Result<proto::RemoteMessageResponse> {
2015 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2016 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2017 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2018 let askpass_id = envelope.payload.askpass_id;
2019
2020 let askpass = make_remote_delegate(
2021 this,
2022 envelope.payload.project_id,
2023 repository_id,
2024 askpass_id,
2025 &mut cx,
2026 );
2027
2028 let remote_output = repository_handle
2029 .update(&mut cx, |repository_handle, cx| {
2030 repository_handle.fetch(fetch_options, askpass, cx)
2031 })
2032 .await??;
2033
2034 Ok(proto::RemoteMessageResponse {
2035 stdout: remote_output.stdout,
2036 stderr: remote_output.stderr,
2037 })
2038 }
2039
    /// Handles a push request from a remote peer, relaying credential prompts
    /// back through an askpass delegate.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // NOTE(review): `options()` presumably falls back to a default enum
        // value when the optional field is unset — hence the presence check
        // via `as_ref().map(...)` before consulting it. Confirm against the
        // generated proto accessors.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2087
2088 async fn handle_pull(
2089 this: Entity<Self>,
2090 envelope: TypedEnvelope<proto::Pull>,
2091 mut cx: AsyncApp,
2092 ) -> Result<proto::RemoteMessageResponse> {
2093 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2094 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2095 let askpass_id = envelope.payload.askpass_id;
2096 let askpass = make_remote_delegate(
2097 this,
2098 envelope.payload.project_id,
2099 repository_id,
2100 askpass_id,
2101 &mut cx,
2102 );
2103
2104 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2105 let remote_name = envelope.payload.remote_name.into();
2106 let rebase = envelope.payload.rebase;
2107
2108 let remote_message = repository_handle
2109 .update(&mut cx, |repository_handle, cx| {
2110 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2111 })
2112 .await??;
2113
2114 Ok(proto::RemoteMessageResponse {
2115 stdout: remote_message.stdout,
2116 stderr: remote_message.stderr,
2117 })
2118 }
2119
2120 async fn handle_stage(
2121 this: Entity<Self>,
2122 envelope: TypedEnvelope<proto::Stage>,
2123 mut cx: AsyncApp,
2124 ) -> Result<proto::Ack> {
2125 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2126 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2127
2128 let entries = envelope
2129 .payload
2130 .paths
2131 .into_iter()
2132 .map(|path| RepoPath::new(&path))
2133 .collect::<Result<Vec<_>>>()?;
2134
2135 repository_handle
2136 .update(&mut cx, |repository_handle, cx| {
2137 repository_handle.stage_entries(entries, cx)
2138 })
2139 .await?;
2140 Ok(proto::Ack {})
2141 }
2142
2143 async fn handle_unstage(
2144 this: Entity<Self>,
2145 envelope: TypedEnvelope<proto::Unstage>,
2146 mut cx: AsyncApp,
2147 ) -> Result<proto::Ack> {
2148 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2149 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2150
2151 let entries = envelope
2152 .payload
2153 .paths
2154 .into_iter()
2155 .map(|path| RepoPath::new(&path))
2156 .collect::<Result<Vec<_>>>()?;
2157
2158 repository_handle
2159 .update(&mut cx, |repository_handle, cx| {
2160 repository_handle.unstage_entries(entries, cx)
2161 })
2162 .await?;
2163
2164 Ok(proto::Ack {})
2165 }
2166
2167 async fn handle_stash(
2168 this: Entity<Self>,
2169 envelope: TypedEnvelope<proto::Stash>,
2170 mut cx: AsyncApp,
2171 ) -> Result<proto::Ack> {
2172 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2173 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2174
2175 let entries = envelope
2176 .payload
2177 .paths
2178 .into_iter()
2179 .map(|path| RepoPath::new(&path))
2180 .collect::<Result<Vec<_>>>()?;
2181
2182 repository_handle
2183 .update(&mut cx, |repository_handle, cx| {
2184 repository_handle.stash_entries(entries, cx)
2185 })
2186 .await?;
2187
2188 Ok(proto::Ack {})
2189 }
2190
2191 async fn handle_stash_pop(
2192 this: Entity<Self>,
2193 envelope: TypedEnvelope<proto::StashPop>,
2194 mut cx: AsyncApp,
2195 ) -> Result<proto::Ack> {
2196 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2197 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2198 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2199
2200 repository_handle
2201 .update(&mut cx, |repository_handle, cx| {
2202 repository_handle.stash_pop(stash_index, cx)
2203 })
2204 .await?;
2205
2206 Ok(proto::Ack {})
2207 }
2208
2209 async fn handle_stash_apply(
2210 this: Entity<Self>,
2211 envelope: TypedEnvelope<proto::StashApply>,
2212 mut cx: AsyncApp,
2213 ) -> Result<proto::Ack> {
2214 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2215 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2216 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2217
2218 repository_handle
2219 .update(&mut cx, |repository_handle, cx| {
2220 repository_handle.stash_apply(stash_index, cx)
2221 })
2222 .await?;
2223
2224 Ok(proto::Ack {})
2225 }
2226
2227 async fn handle_stash_drop(
2228 this: Entity<Self>,
2229 envelope: TypedEnvelope<proto::StashDrop>,
2230 mut cx: AsyncApp,
2231 ) -> Result<proto::Ack> {
2232 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2233 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2234 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2235
2236 repository_handle
2237 .update(&mut cx, |repository_handle, cx| {
2238 repository_handle.stash_drop(stash_index, cx)
2239 })
2240 .await??;
2241
2242 Ok(proto::Ack {})
2243 }
2244
2245 async fn handle_set_index_text(
2246 this: Entity<Self>,
2247 envelope: TypedEnvelope<proto::SetIndexText>,
2248 mut cx: AsyncApp,
2249 ) -> Result<proto::Ack> {
2250 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2251 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2252 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2253
2254 repository_handle
2255 .update(&mut cx, |repository_handle, cx| {
2256 repository_handle.spawn_set_index_text_job(
2257 repo_path,
2258 envelope.payload.text,
2259 None,
2260 cx,
2261 )
2262 })
2263 .await??;
2264 Ok(proto::Ack {})
2265 }
2266
2267 async fn handle_run_hook(
2268 this: Entity<Self>,
2269 envelope: TypedEnvelope<proto::RunGitHook>,
2270 mut cx: AsyncApp,
2271 ) -> Result<proto::Ack> {
2272 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2273 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2274 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2275 repository_handle
2276 .update(&mut cx, |repository_handle, cx| {
2277 repository_handle.run_hook(hook, cx)
2278 })
2279 .await??;
2280 Ok(proto::Ack {})
2281 }
2282
2283 async fn handle_commit(
2284 this: Entity<Self>,
2285 envelope: TypedEnvelope<proto::Commit>,
2286 mut cx: AsyncApp,
2287 ) -> Result<proto::Ack> {
2288 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2289 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2290 let askpass_id = envelope.payload.askpass_id;
2291
2292 let askpass = make_remote_delegate(
2293 this,
2294 envelope.payload.project_id,
2295 repository_id,
2296 askpass_id,
2297 &mut cx,
2298 );
2299
2300 let message = SharedString::from(envelope.payload.message);
2301 let name = envelope.payload.name.map(SharedString::from);
2302 let email = envelope.payload.email.map(SharedString::from);
2303 let options = envelope.payload.options.unwrap_or_default();
2304
2305 repository_handle
2306 .update(&mut cx, |repository_handle, cx| {
2307 repository_handle.commit(
2308 message,
2309 name.zip(email),
2310 CommitOptions {
2311 amend: options.amend,
2312 signoff: options.signoff,
2313 },
2314 askpass,
2315 cx,
2316 )
2317 })
2318 .await??;
2319 Ok(proto::Ack {})
2320 }
2321
2322 async fn handle_get_remotes(
2323 this: Entity<Self>,
2324 envelope: TypedEnvelope<proto::GetRemotes>,
2325 mut cx: AsyncApp,
2326 ) -> Result<proto::GetRemotesResponse> {
2327 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2328 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2329
2330 let branch_name = envelope.payload.branch_name;
2331 let is_push = envelope.payload.is_push;
2332
2333 let remotes = repository_handle
2334 .update(&mut cx, |repository_handle, _| {
2335 repository_handle.get_remotes(branch_name, is_push)
2336 })
2337 .await??;
2338
2339 Ok(proto::GetRemotesResponse {
2340 remotes: remotes
2341 .into_iter()
2342 .map(|remotes| proto::get_remotes_response::Remote {
2343 name: remotes.name.to_string(),
2344 })
2345 .collect::<Vec<_>>(),
2346 })
2347 }
2348
2349 async fn handle_get_worktrees(
2350 this: Entity<Self>,
2351 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2352 mut cx: AsyncApp,
2353 ) -> Result<proto::GitWorktreesResponse> {
2354 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2355 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2356
2357 let worktrees = repository_handle
2358 .update(&mut cx, |repository_handle, _| {
2359 repository_handle.worktrees()
2360 })
2361 .await??;
2362
2363 Ok(proto::GitWorktreesResponse {
2364 worktrees: worktrees
2365 .into_iter()
2366 .map(|worktree| worktree_to_proto(&worktree))
2367 .collect::<Vec<_>>(),
2368 })
2369 }
2370
2371 async fn handle_create_worktree(
2372 this: Entity<Self>,
2373 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2374 mut cx: AsyncApp,
2375 ) -> Result<proto::Ack> {
2376 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2377 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2378 let directory = PathBuf::from(envelope.payload.directory);
2379 let name = envelope.payload.name;
2380 let commit = envelope.payload.commit;
2381
2382 repository_handle
2383 .update(&mut cx, |repository_handle, _| {
2384 repository_handle.create_worktree(name, directory, commit)
2385 })
2386 .await??;
2387
2388 Ok(proto::Ack {})
2389 }
2390
2391 async fn handle_remove_worktree(
2392 this: Entity<Self>,
2393 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2394 mut cx: AsyncApp,
2395 ) -> Result<proto::Ack> {
2396 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2397 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2398 let path = PathBuf::from(envelope.payload.path);
2399 let force = envelope.payload.force;
2400
2401 repository_handle
2402 .update(&mut cx, |repository_handle, _| {
2403 repository_handle.remove_worktree(path, force)
2404 })
2405 .await??;
2406
2407 Ok(proto::Ack {})
2408 }
2409
2410 async fn handle_rename_worktree(
2411 this: Entity<Self>,
2412 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2413 mut cx: AsyncApp,
2414 ) -> Result<proto::Ack> {
2415 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2416 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2417 let old_path = PathBuf::from(envelope.payload.old_path);
2418 let new_path = PathBuf::from(envelope.payload.new_path);
2419
2420 repository_handle
2421 .update(&mut cx, |repository_handle, _| {
2422 repository_handle.rename_worktree(old_path, new_path)
2423 })
2424 .await??;
2425
2426 Ok(proto::Ack {})
2427 }
2428
2429 async fn handle_get_branches(
2430 this: Entity<Self>,
2431 envelope: TypedEnvelope<proto::GitGetBranches>,
2432 mut cx: AsyncApp,
2433 ) -> Result<proto::GitBranchesResponse> {
2434 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2435 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2436
2437 let branches = repository_handle
2438 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2439 .await??;
2440
2441 Ok(proto::GitBranchesResponse {
2442 branches: branches
2443 .into_iter()
2444 .map(|branch| branch_to_proto(&branch))
2445 .collect::<Vec<_>>(),
2446 })
2447 }
2448 async fn handle_get_default_branch(
2449 this: Entity<Self>,
2450 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2451 mut cx: AsyncApp,
2452 ) -> Result<proto::GetDefaultBranchResponse> {
2453 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2454 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2455
2456 let branch = repository_handle
2457 .update(&mut cx, |repository_handle, _| {
2458 repository_handle.default_branch(false)
2459 })
2460 .await??
2461 .map(Into::into);
2462
2463 Ok(proto::GetDefaultBranchResponse { branch })
2464 }
2465 async fn handle_create_branch(
2466 this: Entity<Self>,
2467 envelope: TypedEnvelope<proto::GitCreateBranch>,
2468 mut cx: AsyncApp,
2469 ) -> Result<proto::Ack> {
2470 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2471 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2472 let branch_name = envelope.payload.branch_name;
2473
2474 repository_handle
2475 .update(&mut cx, |repository_handle, _| {
2476 repository_handle.create_branch(branch_name, None)
2477 })
2478 .await??;
2479
2480 Ok(proto::Ack {})
2481 }
2482
2483 async fn handle_change_branch(
2484 this: Entity<Self>,
2485 envelope: TypedEnvelope<proto::GitChangeBranch>,
2486 mut cx: AsyncApp,
2487 ) -> Result<proto::Ack> {
2488 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2489 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2490 let branch_name = envelope.payload.branch_name;
2491
2492 repository_handle
2493 .update(&mut cx, |repository_handle, _| {
2494 repository_handle.change_branch(branch_name)
2495 })
2496 .await??;
2497
2498 Ok(proto::Ack {})
2499 }
2500
2501 async fn handle_rename_branch(
2502 this: Entity<Self>,
2503 envelope: TypedEnvelope<proto::GitRenameBranch>,
2504 mut cx: AsyncApp,
2505 ) -> Result<proto::Ack> {
2506 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2507 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2508 let branch = envelope.payload.branch;
2509 let new_name = envelope.payload.new_name;
2510
2511 repository_handle
2512 .update(&mut cx, |repository_handle, _| {
2513 repository_handle.rename_branch(branch, new_name)
2514 })
2515 .await??;
2516
2517 Ok(proto::Ack {})
2518 }
2519
2520 async fn handle_create_remote(
2521 this: Entity<Self>,
2522 envelope: TypedEnvelope<proto::GitCreateRemote>,
2523 mut cx: AsyncApp,
2524 ) -> Result<proto::Ack> {
2525 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2526 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2527 let remote_name = envelope.payload.remote_name;
2528 let remote_url = envelope.payload.remote_url;
2529
2530 repository_handle
2531 .update(&mut cx, |repository_handle, _| {
2532 repository_handle.create_remote(remote_name, remote_url)
2533 })
2534 .await??;
2535
2536 Ok(proto::Ack {})
2537 }
2538
2539 async fn handle_delete_branch(
2540 this: Entity<Self>,
2541 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2542 mut cx: AsyncApp,
2543 ) -> Result<proto::Ack> {
2544 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2545 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2546 let branch_name = envelope.payload.branch_name;
2547
2548 repository_handle
2549 .update(&mut cx, |repository_handle, _| {
2550 repository_handle.delete_branch(branch_name)
2551 })
2552 .await??;
2553
2554 Ok(proto::Ack {})
2555 }
2556
2557 async fn handle_remove_remote(
2558 this: Entity<Self>,
2559 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2560 mut cx: AsyncApp,
2561 ) -> Result<proto::Ack> {
2562 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2563 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2564 let remote_name = envelope.payload.remote_name;
2565
2566 repository_handle
2567 .update(&mut cx, |repository_handle, _| {
2568 repository_handle.remove_remote(remote_name)
2569 })
2570 .await??;
2571
2572 Ok(proto::Ack {})
2573 }
2574
2575 async fn handle_show(
2576 this: Entity<Self>,
2577 envelope: TypedEnvelope<proto::GitShow>,
2578 mut cx: AsyncApp,
2579 ) -> Result<proto::GitCommitDetails> {
2580 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2581 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2582
2583 let commit = repository_handle
2584 .update(&mut cx, |repository_handle, _| {
2585 repository_handle.show(envelope.payload.commit)
2586 })
2587 .await??;
2588 Ok(proto::GitCommitDetails {
2589 sha: commit.sha.into(),
2590 message: commit.message.into(),
2591 commit_timestamp: commit.commit_timestamp,
2592 author_email: commit.author_email.into(),
2593 author_name: commit.author_name.into(),
2594 })
2595 }
2596
2597 async fn handle_load_commit_diff(
2598 this: Entity<Self>,
2599 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2600 mut cx: AsyncApp,
2601 ) -> Result<proto::LoadCommitDiffResponse> {
2602 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2603 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2604
2605 let commit_diff = repository_handle
2606 .update(&mut cx, |repository_handle, _| {
2607 repository_handle.load_commit_diff(envelope.payload.commit)
2608 })
2609 .await??;
2610 Ok(proto::LoadCommitDiffResponse {
2611 files: commit_diff
2612 .files
2613 .into_iter()
2614 .map(|file| proto::CommitFile {
2615 path: file.path.to_proto(),
2616 old_text: file.old_text,
2617 new_text: file.new_text,
2618 is_binary: file.is_binary,
2619 })
2620 .collect(),
2621 })
2622 }
2623
2624 async fn handle_file_history(
2625 this: Entity<Self>,
2626 envelope: TypedEnvelope<proto::GitFileHistory>,
2627 mut cx: AsyncApp,
2628 ) -> Result<proto::GitFileHistoryResponse> {
2629 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2630 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2631 let path = RepoPath::from_proto(&envelope.payload.path)?;
2632 let skip = envelope.payload.skip as usize;
2633 let limit = envelope.payload.limit.map(|l| l as usize);
2634
2635 let file_history = repository_handle
2636 .update(&mut cx, |repository_handle, _| {
2637 repository_handle.file_history_paginated(path, skip, limit)
2638 })
2639 .await??;
2640
2641 Ok(proto::GitFileHistoryResponse {
2642 entries: file_history
2643 .entries
2644 .into_iter()
2645 .map(|entry| proto::FileHistoryEntry {
2646 sha: entry.sha.to_string(),
2647 subject: entry.subject.to_string(),
2648 message: entry.message.to_string(),
2649 commit_timestamp: entry.commit_timestamp,
2650 author_name: entry.author_name.to_string(),
2651 author_email: entry.author_email.to_string(),
2652 })
2653 .collect(),
2654 path: file_history.path.to_proto(),
2655 })
2656 }
2657
2658 async fn handle_reset(
2659 this: Entity<Self>,
2660 envelope: TypedEnvelope<proto::GitReset>,
2661 mut cx: AsyncApp,
2662 ) -> Result<proto::Ack> {
2663 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2664 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2665
2666 let mode = match envelope.payload.mode() {
2667 git_reset::ResetMode::Soft => ResetMode::Soft,
2668 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2669 };
2670
2671 repository_handle
2672 .update(&mut cx, |repository_handle, cx| {
2673 repository_handle.reset(envelope.payload.commit, mode, cx)
2674 })
2675 .await??;
2676 Ok(proto::Ack {})
2677 }
2678
2679 async fn handle_checkout_files(
2680 this: Entity<Self>,
2681 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2682 mut cx: AsyncApp,
2683 ) -> Result<proto::Ack> {
2684 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2685 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2686 let paths = envelope
2687 .payload
2688 .paths
2689 .iter()
2690 .map(|s| RepoPath::from_proto(s))
2691 .collect::<Result<Vec<_>>>()?;
2692
2693 repository_handle
2694 .update(&mut cx, |repository_handle, cx| {
2695 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2696 })
2697 .await?;
2698 Ok(proto::Ack {})
2699 }
2700
    /// Opens the commit-message buffer for the requested repository on behalf
    /// of a remote peer, replicates the buffer to that peer, and replies with
    /// the buffer's id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Share the buffer with the peer that asked for it before replying:
        // the response below carries only the id, so the peer must already
        // have (or be receiving) the buffer state. `original_sender_id` is
        // preferred so forwarded requests reach the true requester.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    // Replication runs in the background; failures are logged.
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2731
    /// Answers a credential prompt for an in-flight remote git operation.
    ///
    /// The delegate registered under `askpass_id` is removed from the shared
    /// map for the duration of the prompt and re-inserted afterwards, so the
    /// same operation can prompt again later.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // A missing delegate means the request raced with (or outlived) the
        // git operation that registered it — a bug in debug builds, a plain
        // error in release builds.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        // Note: the map lock is NOT held across this await.
        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2760
2761 async fn handle_check_for_pushed_commits(
2762 this: Entity<Self>,
2763 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2764 mut cx: AsyncApp,
2765 ) -> Result<proto::CheckForPushedCommitsResponse> {
2766 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2767 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2768
2769 let branches = repository_handle
2770 .update(&mut cx, |repository_handle, _| {
2771 repository_handle.check_for_pushed_commits()
2772 })
2773 .await??;
2774 Ok(proto::CheckForPushedCommitsResponse {
2775 pushed_to: branches
2776 .into_iter()
2777 .map(|commit| commit.to_string())
2778 .collect(),
2779 })
2780 }
2781
2782 async fn handle_git_diff(
2783 this: Entity<Self>,
2784 envelope: TypedEnvelope<proto::GitDiff>,
2785 mut cx: AsyncApp,
2786 ) -> Result<proto::GitDiffResponse> {
2787 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2788 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2789 let diff_type = match envelope.payload.diff_type() {
2790 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2791 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2792 proto::git_diff::DiffType::MergeBase => {
2793 let base_ref = envelope
2794 .payload
2795 .merge_base_ref
2796 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2797 DiffType::MergeBase {
2798 base_ref: base_ref.into(),
2799 }
2800 }
2801 };
2802
2803 let mut diff = repository_handle
2804 .update(&mut cx, |repository_handle, cx| {
2805 repository_handle.diff(diff_type, cx)
2806 })
2807 .await??;
2808 const ONE_MB: usize = 1_000_000;
2809 if diff.len() > ONE_MB {
2810 diff = diff.chars().take(ONE_MB).collect()
2811 }
2812
2813 Ok(proto::GitDiffResponse { diff })
2814 }
2815
    /// Handles a `GetTreeDiff` RPC request: computes which paths differ
    /// between two tree-ishes and returns each path's status (and, where one
    /// exists, the pre-change blob oid).
    async fn handle_tree_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetTreeDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId(request.payload.repository_id);
        // `is_merge` selects a merge-base comparison; otherwise diff directly
        // from `base` to `head`.
        let diff_type = if request.payload.is_merge {
            DiffTreeType::MergeBase {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        } else {
            DiffTreeType::Since {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        };

        let diff = this
            .update(&mut cx, |this, cx| {
                // `?` inside this closure yields `None` for an unknown
                // repository, which `.context(...)` below turns into an error.
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
            })
            .context("missing repository")?
            .await??;

        Ok(proto::GetTreeDiffResponse {
            entries: diff
                .entries
                .into_iter()
                .map(|(path, status)| proto::TreeDiffStatus {
                    path: path.as_ref().to_proto(),
                    // This first match binds nothing, so `status` can still be
                    // matched again below to extract the old oid.
                    status: match status {
                        TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
                        TreeDiffStatus::Modified { .. } => {
                            proto::tree_diff_status::Status::Modified.into()
                        }
                        TreeDiffStatus::Deleted { .. } => {
                            proto::tree_diff_status::Status::Deleted.into()
                        }
                    },
                    // Only entries that had a previous version carry an oid.
                    oid: match status {
                        TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
                            Some(old.to_string())
                        }
                        TreeDiffStatus::Added => None,
                    },
                })
                .collect(),
        })
    }
2867
2868 async fn handle_get_blob_content(
2869 this: Entity<Self>,
2870 request: TypedEnvelope<proto::GetBlobContent>,
2871 mut cx: AsyncApp,
2872 ) -> Result<proto::GetBlobContentResponse> {
2873 let oid = git::Oid::from_str(&request.payload.oid)?;
2874 let repository_id = RepositoryId(request.payload.repository_id);
2875 let content = this
2876 .update(&mut cx, |this, cx| {
2877 let repository = this.repositories().get(&repository_id)?;
2878 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2879 })
2880 .context("missing repository")?
2881 .await?;
2882 Ok(proto::GetBlobContentResponse { content })
2883 }
2884
    /// Opens the unstaged diff for a buffer on behalf of a remote peer,
    /// records it in `shared_diffs` so later updates can be forwarded to that
    /// peer, and replies with the diff's base ("staged") text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                // `?` yields `None` for an unknown buffer, turned into an
                // error by `.context(...)` below.
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Remember that this peer holds the unstaged diff for this buffer,
        // keyed by the true requester when the request was forwarded.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        // `None` when the base text does not exist for this buffer.
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2908
    /// Opens the uncommitted diff for a buffer on behalf of a remote peer,
    /// records it in `shared_diffs`, and replies with whichever base texts
    /// (committed and/or staged) the peer needs to reconstruct the diff.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff per requesting peer so future base-text updates can
        // be forwarded.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff (if any) holds the index ("staged") base.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            // Decide how much base text to send:
            // - if the committed base exists and the index base is the very
            //   same snapshot (same remote id), send only the committed text
            //   and mark the index as matching HEAD;
            // - otherwise send whichever of the two base texts exist.
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2969
    /// Applies updated diff base texts to the diff state tracked for the given
    /// buffer. Updates for buffers or diff states that no longer exist are
    /// silently dropped.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            // Both the diff state and the buffer must still be around;
            // otherwise there is nothing to update.
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
2988
    /// Computes git blame for a buffer at the version specified in the
    /// request, waiting for the local buffer replica to reach that version
    /// first so the blame corresponds to the caller's view of the text.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Block until the requested version has been replicated locally.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3011
3012 async fn handle_get_permalink_to_line(
3013 this: Entity<Self>,
3014 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3015 mut cx: AsyncApp,
3016 ) -> Result<proto::GetPermalinkToLineResponse> {
3017 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3018 // let version = deserialize_version(&envelope.payload.version);
3019 let selection = {
3020 let proto_selection = envelope
3021 .payload
3022 .selection
3023 .context("no selection to get permalink for defined")?;
3024 proto_selection.start as u32..proto_selection.end as u32
3025 };
3026 let buffer = this.read_with(&cx, |this, cx| {
3027 this.buffer_store.read(cx).get_existing(buffer_id)
3028 })?;
3029 let permalink = this
3030 .update(&mut cx, |this, cx| {
3031 this.get_permalink_to_line(&buffer, selection, cx)
3032 })
3033 .await?;
3034 Ok(proto::GetPermalinkToLineResponse {
3035 permalink: permalink.to_string(),
3036 })
3037 }
3038
3039 fn repository_for_request(
3040 this: &Entity<Self>,
3041 id: RepositoryId,
3042 cx: &mut AsyncApp,
3043 ) -> Result<Entity<Repository>> {
3044 this.read_with(cx, |this, _| {
3045 this.repositories
3046 .get(&id)
3047 .context("missing repository handle")
3048 .cloned()
3049 })
3050 }
3051
3052 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3053 self.repositories
3054 .iter()
3055 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3056 .collect()
3057 }
3058
    /// Groups a worktree's changed entries by the repository that contains
    /// them, mapping each entry to its repo-relative path. The heavy lifting
    /// runs on the background executor; the returned task resolves to a map
    /// of repository -> changed repo paths, with each path assigned to its
    /// innermost containing repository.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize up front so the background task needs no worktree access.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // Repos are visited in reverse sorted order so that nested
            // (more-specific) work directories are processed first.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3138}
3139
impl BufferGitState {
    /// Creates an empty per-buffer git state: no diffs, no base texts, and
    /// no conflict tracking yet.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Watch channel flag: `true` while a diff recalculation is in
            // flight. See `wait_for_recalculation`.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }

    /// Records the buffer's new language and recomputes all diffs so diff
    /// state stays consistent with the buffer's syntax configuration.
    #[ztracing::instrument(skip_all)]
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Re-parses git conflict markers in `buffer` and updates the associated
    /// conflict set. The returned receiver fires once the conflict set has
    /// been updated; if there is no live conflict set, or no conflicts were
    /// previously present, the sender is dropped and the receiver resolves
    /// with `Canceled` instead.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when conflicts were present before — otherwise there
        // is no old snapshot to compare against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parsing and comparing conflict regions can be expensive, so
                // do it off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake every caller waiting on a reparse.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }

    /// Returns the live unstaged (index vs. buffer) diff, if any.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Returns the live uncommitted (HEAD vs. buffer) diff, if any.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Returns the live diff against the given commit oid, if any.
    fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
        self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
    }

    /// Applies new index/HEAD base texts received from the remote peer and
    /// triggers a diff recalculation. Unknown modes are silently ignored.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }

    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves once it finishes; returns `None` when nothing is pending.
    pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
        if *self.recalculating_tx.borrow() {
            let mut rx = self.recalculating_tx.subscribe();
            Some(async move {
                loop {
                    let is_recalculating = rx.recv().await;
                    // Stop when the flag flips to `false` or the channel closes.
                    if is_recalculating != Some(true) {
                        break;
                    }
                }
            })
        } else {
            None
        }
    }

    /// Stores new base texts (normalizing line endings), records which of
    /// them changed, and kicks off a diff recalculation.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: Option<DiffBasesChange>,
        cx: &mut Context<Self>,
    ) {
        match diff_bases_change {
            Some(DiffBasesChange::SetIndex(index)) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetHead(head)) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            Some(DiffBasesChange::SetBoth(text)) => {
                // Store the *same* Arc in both fields: `recalculate_diffs`
                // detects "index matches HEAD" via pointer equality.
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::from(text.as_str())
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetEach { index, head }) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            None => {}
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged, uncommitted, and per-oid diffs against the
    /// current base texts and publishes the results to the corresponding
    /// `BufferDiff` entities. Runs as a spawned task; progress is signalled
    /// through `recalculating_tx`. The attempt is aborted if new hunk
    /// staging operations arrive while it is running.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality: `SetBoth` stores one shared Arc in both fields,
        // so this cheaply detects "index matches HEAD".
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop bookkeeping (and cached base texts) for oid diffs whose
        // entities have been released.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When the index matches HEAD the unstaged diff can be
                // reused wholesale, saving a second diff computation.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Recompute each surviving oid diff against its cached base text.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and signal completion to any waiters.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
}
3519
/// Builds an [`AskPassDelegate`] that forwards credential prompts from a git
/// operation to the downstream client over RPC and relays the encrypted
/// response back to the waiting operation.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // Without a downstream client there is nobody to prompt; drop
            // the request (the waiting side observes the dropped sender).
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the raw response from memory once it has been
                // encrypted and handed off.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3549
3550impl RepositoryId {
3551 pub fn to_proto(self) -> u64 {
3552 self.0
3553 }
3554
3555 pub fn from_proto(id: u64) -> Self {
3556 RepositoryId(id)
3557 }
3558}
3559
impl RepositorySnapshot {
    /// Creates a snapshot with no statuses, branch, or merge state for a
    /// repository rooted at `work_directory_abs_path`.
    fn empty(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
    ) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            // NOTE(review): presumably differs from the working directory
            // only for linked git worktrees — falls back to the working
            // directory itself otherwise.
            original_repo_abs_path: original_repo_abs_path
                .unwrap_or_else(|| work_directory_abs_path.clone()),
            work_directory_abs_path,
            branch: None,
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
            linked_worktrees: Arc::from([]),
            path_style,
        }
    }

    /// Builds the first `UpdateRepository` message sent downstream: the
    /// complete current state (all statuses, branch, conflicts, stash,
    /// remotes, linked worktrees) with nothing marked removed.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// Builds an incremental `UpdateRepository` message describing the
    /// difference between `old` and `self`: statuses that are new or changed
    /// go to `updated_statuses`, vanished ones to `removed_statuses`.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        // Merge-walk the two path-ordered status sequences.
        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // In both: send only if something changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// Returns the git worktrees linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }

    /// Iterates over all per-path status entries, cloned.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// Returns the aggregate status summary across all paths.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// Looks up the status entry recorded for `path`, if any.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// Looks up the diff statistics recorded for `path`, if any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }

    /// Converts an absolute path into a path relative to this repository's
    /// working directory, or `None` when the path lies outside of it.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Converts a repo-relative path back into an absolute path under the
    /// working directory.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        // Joining a relative repo path onto the absolute working directory;
        // NOTE(review): the unwrap presumes `join` cannot fail for this
        // combination — confirm against PathStyle::join's contract.
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }

    /// Shared implementation for absolute → repo-relative path conversion,
    /// usable without a full snapshot (see `process_updated_entries`).
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }

    /// Reports whether `repo_path` was conflicted when the merge heads last
    /// changed, regardless of its current status.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }

    /// Reports whether `repo_path` is conflicted, either per its current
    /// status or per the recorded merge state.
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change = self
            .merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
3780
3781pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3782 proto::StashEntry {
3783 oid: entry.oid.as_bytes().to_vec(),
3784 message: entry.message.clone(),
3785 branch: entry.branch.clone(),
3786 index: entry.index as u64,
3787 timestamp: entry.timestamp,
3788 }
3789}
3790
3791pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3792 Ok(StashEntry {
3793 oid: Oid::from_bytes(&entry.oid)?,
3794 message: entry.message.clone(),
3795 index: entry.index as usize,
3796 branch: entry.branch.clone(),
3797 timestamp: entry.timestamp,
3798 })
3799}
3800
3801impl MergeDetails {
3802 async fn update(
3803 &mut self,
3804 backend: &Arc<dyn GitRepository>,
3805 current_conflicted_paths: Vec<RepoPath>,
3806 ) -> Result<bool> {
3807 log::debug!("load merge details");
3808 self.message = backend.merge_message().await.map(SharedString::from);
3809 let heads = backend
3810 .revparse_batch(vec![
3811 "MERGE_HEAD".into(),
3812 "CHERRY_PICK_HEAD".into(),
3813 "REBASE_HEAD".into(),
3814 "REVERT_HEAD".into(),
3815 "APPLY_HEAD".into(),
3816 ])
3817 .await
3818 .log_err()
3819 .unwrap_or_default()
3820 .into_iter()
3821 .map(|opt| opt.map(SharedString::from))
3822 .collect::<Vec<_>>();
3823
3824 let mut conflicts_changed = false;
3825
3826 // Record the merge state for newly conflicted paths
3827 for path in ¤t_conflicted_paths {
3828 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3829 conflicts_changed = true;
3830 self.merge_heads_by_conflicted_path
3831 .insert(path.clone(), heads.clone());
3832 }
3833 }
3834
3835 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3836 self.merge_heads_by_conflicted_path
3837 .retain(|path, old_merge_heads| {
3838 let keep = current_conflicted_paths.contains(path)
3839 || (old_merge_heads == &heads
3840 && old_merge_heads.iter().any(|head| head.is_some()));
3841 if !keep {
3842 conflicts_changed = true;
3843 }
3844 keep
3845 });
3846
3847 Ok(conflicts_changed)
3848 }
3849}
3850
3851impl Repository {
3852 pub fn is_trusted(&self) -> bool {
3853 match self.repository_state.peek() {
3854 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3855 _ => false,
3856 }
3857 }
3858
3859 pub fn snapshot(&self) -> RepositorySnapshot {
3860 self.snapshot.clone()
3861 }
3862
    /// Iterates over all pending git operations recorded for this repository.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
3866
    /// Returns the aggregate summary across all pending operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
3870
    /// Looks up the pending operations recorded for `path`, if any.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
3876
    /// Creates a repository entity backed by a local git repository.
    ///
    /// The backend is opened asynchronously: `repository_state` is a shared
    /// future that resolves to `RepositoryState::Local` once
    /// `LocalRepositoryState` is ready, or to the error message if opening
    /// failed. A local git worker is spawned up front to process queued jobs
    /// against that state.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Stringify the error so the shared future's output is Clone.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Clear cached initial graph data whenever the branch changes after
        // the initial scan.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3944
    /// Creates a repository entity that proxies all git operations to a
    /// remote peer over `client`.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike the local case, the remote state is available immediately.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3982
3983 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3984 self.git_store.upgrade()
3985 }
3986
    /// Reloads the git base texts (index and HEAD contents) for every open
    /// buffer that belongs to this repository, and pushes any changes into
    /// the buffers' diff states — and to the downstream client, when one is
    /// connected.
    ///
    /// Runs as a keyed job so repeated requests coalesce; only meaningful
    /// for local repositories.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Collect, for each open buffer in this repository, its repo
                // path plus the currently-loaded index/HEAD texts (only for
                // diffs that are still alive/upgradable).
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Reload the base texts from the git backend and work out,
                // per buffer, whether the index, the HEAD text, or both
                // changed relative to what is currently loaded.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // When both changed to the same
                                            // content, collapse to SetBoth.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Apply the changes locally and mirror them downstream.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4146
    /// Enqueues `job` on this repository's git worker without a
    /// de-duplication key. See [`Self::send_keyed_job`] for the semantics of
    /// `status` and the returned receiver.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4159
    /// Enqueues `job` on the repository's git job queue, optionally tagged
    /// with `key` (consumed by the queue's worker; `None` means untagged).
    ///
    /// While the job runs, a `JobInfo` entry (start time + `status` message)
    /// is kept in `active_jobs` so in-flight operations can be displayed; the
    /// entry is removed once the job completes, successfully or not. The
    /// job's result is delivered through the returned oneshot receiver.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Monotonically increasing id used to key this job in `active_jobs`.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Only jobs carrying a status message are registered as
                        // "active" (and thus surfaced to observers).
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Unregister regardless of how the job finished.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may already be dropped; that's fine.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4209
4210 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4211 let Some(git_store) = self.git_store.upgrade() else {
4212 return;
4213 };
4214 let entity = cx.entity();
4215 git_store.update(cx, |git_store, cx| {
4216 let Some((&id, _)) = git_store
4217 .repositories
4218 .iter()
4219 .find(|(_, handle)| *handle == &entity)
4220 else {
4221 return;
4222 };
4223 git_store.active_repo_id = Some(id);
4224 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4225 });
4226 }
4227
    /// Returns the file statuses recorded in the current snapshot, without
    /// consulting the underlying git repository.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4231
    /// Looks up the snapshot's cached diff statistics for `path`, if any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4235
    /// Returns a clone of the stash entries recorded in the current snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4239
4240 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4241 let git_store = self.git_store.upgrade()?;
4242 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4243 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4244 let abs_path = SanitizedPath::new(&abs_path);
4245 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4246 Some(ProjectPath {
4247 worktree_id: worktree.read(cx).id(),
4248 path: relative_path,
4249 })
4250 }
4251
4252 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4253 let git_store = self.git_store.upgrade()?;
4254 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4255 let abs_path = worktree_store.absolutize(path, cx)?;
4256 self.snapshot.abs_path_to_repo_path(&abs_path)
4257 }
4258
4259 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4260 other
4261 .read(cx)
4262 .snapshot
4263 .work_directory_abs_path
4264 .starts_with(&self.snapshot.work_directory_abs_path)
4265 }
4266
    /// Returns the shared commit-message buffer for this repository, creating
    /// it on first use.
    ///
    /// The buffer is cached in `commit_message_buffer`, so repeated calls
    /// resolve immediately. For a local repository the buffer is created via
    /// [`Self::open_local_commit_buffer`]; for a remote one it is opened on
    /// the host through `OpenCommitMessageBuffer` and awaited locally. In both
    /// cases the "Git Commit" language is applied when `languages` is given.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the cached buffer.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait until the host has replicated the buffer to us.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache for subsequent calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4319
4320 fn open_local_commit_buffer(
4321 language_registry: Option<Arc<LanguageRegistry>>,
4322 buffer_store: Entity<BufferStore>,
4323 cx: &mut Context<Self>,
4324 ) -> Task<Result<Entity<Buffer>>> {
4325 cx.spawn(async move |repository, cx| {
4326 let git_commit_language = match language_registry {
4327 Some(language_registry) => {
4328 Some(language_registry.language_for_name("Git Commit").await?)
4329 }
4330 None => None,
4331 };
4332 let buffer = buffer_store
4333 .update(cx, |buffer_store, cx| {
4334 buffer_store.create_buffer(git_commit_language, false, cx)
4335 })
4336 .await?;
4337
4338 repository.update(cx, |repository, _| {
4339 repository.commit_message_buffer = Some(buffer.clone());
4340 })?;
4341 Ok(buffer)
4342 })
4343 }
4344
    /// Restores `paths` to their state at `commit`
    /// (`git checkout <commit> -- <paths>`).
    ///
    /// The affected paths are tracked as pending `Reverted` operations for the
    /// duration of the job. Local repositories invoke the backend directly;
    /// remote ones forward a `GitCheckoutFiles` request to the host.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4399
4400 pub fn reset(
4401 &mut self,
4402 commit: String,
4403 reset_mode: ResetMode,
4404 _cx: &mut App,
4405 ) -> oneshot::Receiver<Result<()>> {
4406 let id = self.id;
4407
4408 self.send_job(None, move |git_repo, _| async move {
4409 match git_repo {
4410 RepositoryState::Local(LocalRepositoryState {
4411 backend,
4412 environment,
4413 ..
4414 }) => backend.reset(commit, reset_mode, environment).await,
4415 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4416 client
4417 .request(proto::GitReset {
4418 project_id: project_id.0,
4419 repository_id: id.to_proto(),
4420 commit,
4421 mode: match reset_mode {
4422 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4423 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4424 },
4425 })
4426 .await?;
4427
4428 Ok(())
4429 }
4430 }
4431 })
4432 }
4433
4434 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4435 let id = self.id;
4436 self.send_job(None, move |git_repo, _cx| async move {
4437 match git_repo {
4438 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4439 backend.show(commit).await
4440 }
4441 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4442 let resp = client
4443 .request(proto::GitShow {
4444 project_id: project_id.0,
4445 repository_id: id.to_proto(),
4446 commit,
4447 })
4448 .await?;
4449
4450 Ok(CommitDetails {
4451 sha: resp.sha.into(),
4452 message: resp.message.into(),
4453 commit_timestamp: resp.commit_timestamp,
4454 author_email: resp.author_email.into(),
4455 author_name: resp.author_name.into(),
4456 })
4457 }
4458 }
4459 })
4460 }
4461
    /// Loads the full diff of `commit` (per-file old/new text plus a binary
    /// flag). Local repositories read it from the backend; remote ones issue
    /// a `LoadCommitDiff` request and decode the per-file payloads.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        // Fails as a whole if any file's path fails to decode.
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4497
    /// Loads the complete history of `path` (no pagination: skip 0, no limit).
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4504
    /// Loads a page of `path`'s commit history, skipping the first `skip`
    /// entries and returning at most `limit` entries (`None` = unlimited).
    /// Local repositories query the backend; remote ones issue a
    /// `GitFileHistory` request and decode the response.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        // The path may have been rewritten by the host (e.g.
                        // for renames); use the one echoed in the response.
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4546
    /// Returns the graph data cached for `(log_source, log_order)`, if a
    /// fetch has already been started via [`Self::graph_data`].
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4554
    /// Returns the commits in `range` of the git graph for
    /// `(log_source, log_order)`, kicking off a background fetch on first
    /// access for that key.
    ///
    /// The response also reports whether the fetch is still in flight and any
    /// error recorded by the fetch task.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        // First access for this (source, order) pair spawns the fetch task
        // and inserts an empty entry that the task fills in incrementally.
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record a fetch failure on the entry so callers can
                    // surface it; the entry must still exist because dropping
                    // it also drops this task.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the data loaded so far.
        // NOTE(review): the start is clamped to `len - 1`, so a range that
        // lies entirely past the end yields the final commit rather than an
        // empty slice — presumably intentional, but worth confirming.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4624
    /// Streams the initial commit-graph data for a local repository.
    ///
    /// Spawns a background task that asks the backend for graph batches over
    /// an unbounded channel, and on the foreground appends each batch to the
    /// `initial_graph_data` entry for `(log_source, log_order)`, emitting a
    /// `GitGraphEvent::CountUpdated` event per appended commit. Completes when
    /// the channel closes, propagating any backend error as a `SharedString`.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // The backend produces batches on a background thread and sends them
        // through `request_tx`; dropping the sender ends the loop below.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            // Keep the oid -> index map in sync with the
                            // append-only commit list.
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                // The entry is created before this task starts; a vacant
                // entry here means the task outlived its data.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4679
    /// Returns the cached state for `sha`'s commit data, requesting a load if
    /// it is not cached yet.
    ///
    /// A cache miss enqueues `sha` with the open commit-data handler (marking
    /// the entry `Loading`), or starts the handler if it is closed. While the
    /// handler is starting, or if enqueueing fails, `Loading` is reported
    /// without caching anything.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only record `Loading` if the request was actually queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4700
    /// Starts the background handler that resolves individual commit-data
    /// requests for the git graph.
    ///
    /// Two tasks are spawned: a background task that owns the backend's
    /// commit-data reader and serves requests from `request_rx` (shutting
    /// down after 10 seconds of inactivity), and a foreground task that
    /// receives the results, stores them in `commit_data`, and marks the
    /// handler `Closed` once the result channel ends so a later request can
    /// restart it.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Bounded results channel applies back-pressure to the reader;
        // requests are unbounded so callers never block.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    // Replacing a `Loading` placeholder is expected; replacing
                    // loaded data is not.
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Result channel closed (reader timed out or failed): allow the
            // handler to be reopened on the next request.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: shut the reader down if no request arrives
                // within 10 seconds.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Failed reads are logged but do not stop the
                                // handler; the entry stays `Loading`.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task mark the
            // handler as `Closed`.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4791
    /// Returns the parent git store's buffer store, or `None` if the git
    /// store has been dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4795
    /// Collects save tasks for every open buffer among `entries` that still
    /// exists on disk and has unsaved edits.
    ///
    /// Used before index-mutating operations so the staged content matches
    /// the editor content. Paths without an open buffer, deleted files, and
    /// clean buffers are skipped.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
4822
    /// Stages `entries` in the index (`git add`). See
    /// [`Self::stage_or_unstage_entries`] for details.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4830
    /// Unstages `entries` from the index (`git reset`). See
    /// [`Self::stage_or_unstage_entries`] for details.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4838
    /// Stages (`stage == true`) or unstages `entries`, keeping buffer diffs
    /// consistent with the operation.
    ///
    /// Steps:
    /// 1. Save any dirty open buffers for the affected paths, so the index
    ///    write picks up the editor contents.
    /// 2. Enqueue a keyed job (`GitJobKey::WriteIndex`) so index writes for
    ///    the same path set are serialized.
    /// 3. Before touching the index, optimistically toggle all hunks in each
    ///    affected buffer's uncommitted diff and bump that diff state's
    ///    `hunk_staging_operation_count`, remembering the count per state.
    /// 4. Run the actual operation (backend call locally, `Stage`/`Unstage`
    ///    request remotely).
    /// 5. On success, record the remembered count as
    ///    `hunk_staging_operation_count_as_of_write`; on failure, roll back
    ///    the optimistic update by clearing pending hunks.
    ///
    /// The affected paths are tracked as pending `Staged`/`Unstaged`
    /// operations while the job runs.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status message shown while the job runs.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Persist dirty buffers before mutating the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip every hunk in each affected
                            // buffer's uncommitted diff, collecting the
                            // resulting operation count per diff state so we
                            // can reconcile after the index write.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index mutation.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic hunk updates with the
                            // write's outcome.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5023
5024 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5025 let to_stage = self
5026 .cached_status()
5027 .filter_map(|entry| {
5028 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
5029 if ops.staging() || ops.staged() {
5030 None
5031 } else {
5032 Some(entry.repo_path)
5033 }
5034 } else if entry.status.staging().is_fully_staged() {
5035 None
5036 } else {
5037 Some(entry.repo_path)
5038 }
5039 })
5040 .collect();
5041 self.stage_or_unstage_entries(true, to_stage, cx)
5042 }
5043
5044 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5045 let to_unstage = self
5046 .cached_status()
5047 .filter_map(|entry| {
5048 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
5049 if !ops.staging() && !ops.staged() {
5050 None
5051 } else {
5052 Some(entry.repo_path)
5053 }
5054 } else if entry.status.staging().is_fully_unstaged() {
5055 None
5056 } else {
5057 Some(entry.repo_path)
5058 }
5059 })
5060 .collect();
5061 self.stage_or_unstage_entries(false, to_unstage, cx)
5062 }
5063
5064 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5065 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5066
5067 self.stash_entries(to_stash, cx)
5068 }
5069
    /// Stashes the given `entries`. Local repositories call the backend's
    /// `stash_paths`; remote ones forward a `Stash` request to the host.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?`: the entity update; second: the job's own result.
            .await??;
            Ok(())
        })
    }
5106
    /// Pops the stash entry at `index` (or the latest entry when `None`).
    /// Local repositories call the backend; remote ones forward a `StashPop`
    /// request to the host.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5140
    /// Applies the stash entry at `index` (or the latest entry when `None`)
    /// without removing it from the stash. Local repositories call the
    /// backend; remote ones forward a `StashApply` request to the host.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5174
5175 pub fn stash_drop(
5176 &mut self,
5177 index: Option<usize>,
5178 cx: &mut Context<Self>,
5179 ) -> oneshot::Receiver<anyhow::Result<()>> {
5180 let id = self.id;
5181 let updates_tx = self
5182 .git_store()
5183 .and_then(|git_store| match &git_store.read(cx).state {
5184 GitStoreState::Local { downstream, .. } => downstream
5185 .as_ref()
5186 .map(|downstream| downstream.updates_tx.clone()),
5187 _ => None,
5188 });
5189 let this = cx.weak_entity();
5190 self.send_job(None, move |git_repo, mut cx| async move {
5191 match git_repo {
5192 RepositoryState::Local(LocalRepositoryState {
5193 backend,
5194 environment,
5195 ..
5196 }) => {
5197 // TODO would be nice to not have to do this manually
5198 let result = backend.stash_drop(index, environment).await;
5199 if result.is_ok()
5200 && let Ok(stash_entries) = backend.stash_entries().await
5201 {
5202 let snapshot = this.update(&mut cx, |this, cx| {
5203 this.snapshot.stash_entries = stash_entries;
5204 cx.emit(RepositoryEvent::StashEntriesChanged);
5205 this.snapshot.clone()
5206 })?;
5207 if let Some(updates_tx) = updates_tx {
5208 updates_tx
5209 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5210 .ok();
5211 }
5212 }
5213
5214 result
5215 }
5216 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5217 client
5218 .request(proto::StashDrop {
5219 project_id: project_id.0,
5220 repository_id: id.to_proto(),
5221 stash_index: index.map(|i| i as u64),
5222 })
5223 .await
5224 .context("sending stash pop request")?;
5225 Ok(())
5226 }
5227 }
5228 })
5229 }
5230
    /// Runs the given git `hook`. Local repositories execute it via the
    /// backend with the repository's environment; remote ones forward a
    /// `RunGitHook` request to the host.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5257
    /// Creates a commit with `message` and the given `options`, running the
    /// pre-commit hook first.
    ///
    /// The pre-commit hook is started immediately and awaited inside the
    /// commit job, so a hook failure aborts the commit. Local repositories
    /// commit via the backend; remote ones register the askpass delegate
    /// under a fresh id and forward a `Commit` request so the host can relay
    /// credential prompts back to us.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Kick off the pre-commit hook; its result is checked below.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Outer `?`: hook job canceled; inner `?`: hook itself failed.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always unregister the delegate when the request ends.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5312
    /// Runs `git fetch` with the given `fetch_options`.
    ///
    /// Local repositories fetch via the backend, passing `askpass` through
    /// for credential prompts. Remote ones register the askpass delegate
    /// under a fresh id and forward a `Fetch` request so the host can relay
    /// prompts back; the remote command's stdout/stderr are returned.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always unregister the delegate when the request ends.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5354
5355 pub fn push(
5356 &mut self,
5357 branch: SharedString,
5358 remote_branch: SharedString,
5359 remote: SharedString,
5360 options: Option<PushOptions>,
5361 askpass: AskPassDelegate,
5362 cx: &mut Context<Self>,
5363 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5364 let askpass_delegates = self.askpass_delegates.clone();
5365 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5366 let id = self.id;
5367
5368 let args = options
5369 .map(|option| match option {
5370 PushOptions::SetUpstream => " --set-upstream",
5371 PushOptions::Force => " --force-with-lease",
5372 })
5373 .unwrap_or("");
5374
5375 let updates_tx = self
5376 .git_store()
5377 .and_then(|git_store| match &git_store.read(cx).state {
5378 GitStoreState::Local { downstream, .. } => downstream
5379 .as_ref()
5380 .map(|downstream| downstream.updates_tx.clone()),
5381 _ => None,
5382 });
5383
5384 let this = cx.weak_entity();
5385 self.send_job(
5386 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5387 move |git_repo, mut cx| async move {
5388 match git_repo {
5389 RepositoryState::Local(LocalRepositoryState {
5390 backend,
5391 environment,
5392 ..
5393 }) => {
5394 let result = backend
5395 .push(
5396 branch.to_string(),
5397 remote_branch.to_string(),
5398 remote.to_string(),
5399 options,
5400 askpass,
5401 environment.clone(),
5402 cx.clone(),
5403 )
5404 .await;
5405 // TODO would be nice to not have to do this manually
5406 if result.is_ok() {
5407 let branches = backend.branches().await?;
5408 let branch = branches.into_iter().find(|branch| branch.is_head);
5409 log::info!("head branch after scan is {branch:?}");
5410 let snapshot = this.update(&mut cx, |this, cx| {
5411 this.snapshot.branch = branch;
5412 cx.emit(RepositoryEvent::BranchChanged);
5413 this.snapshot.clone()
5414 })?;
5415 if let Some(updates_tx) = updates_tx {
5416 updates_tx
5417 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5418 .ok();
5419 }
5420 }
5421 result
5422 }
5423 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5424 askpass_delegates.lock().insert(askpass_id, askpass);
5425 let _defer = util::defer(|| {
5426 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5427 debug_assert!(askpass_delegate.is_some());
5428 });
5429 let response = client
5430 .request(proto::Push {
5431 project_id: project_id.0,
5432 repository_id: id.to_proto(),
5433 askpass_id,
5434 branch_name: branch.to_string(),
5435 remote_branch_name: remote_branch.to_string(),
5436 remote_name: remote.to_string(),
5437 options: options.map(|options| match options {
5438 PushOptions::Force => proto::push::PushOptions::Force,
5439 PushOptions::SetUpstream => {
5440 proto::push::PushOptions::SetUpstream
5441 }
5442 }
5443 as i32),
5444 })
5445 .await?;
5446
5447 Ok(RemoteCommandOutput {
5448 stdout: response.stdout,
5449 stderr: response.stderr,
5450 })
5451 }
5452 }
5453 },
5454 )
5455 }
5456
5457 pub fn pull(
5458 &mut self,
5459 branch: Option<SharedString>,
5460 remote: SharedString,
5461 rebase: bool,
5462 askpass: AskPassDelegate,
5463 _cx: &mut App,
5464 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5465 let askpass_delegates = self.askpass_delegates.clone();
5466 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5467 let id = self.id;
5468
5469 let mut status = "git pull".to_string();
5470 if rebase {
5471 status.push_str(" --rebase");
5472 }
5473 status.push_str(&format!(" {}", remote));
5474 if let Some(b) = &branch {
5475 status.push_str(&format!(" {}", b));
5476 }
5477
5478 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5479 match git_repo {
5480 RepositoryState::Local(LocalRepositoryState {
5481 backend,
5482 environment,
5483 ..
5484 }) => {
5485 backend
5486 .pull(
5487 branch.as_ref().map(|b| b.to_string()),
5488 remote.to_string(),
5489 rebase,
5490 askpass,
5491 environment.clone(),
5492 cx,
5493 )
5494 .await
5495 }
5496 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5497 askpass_delegates.lock().insert(askpass_id, askpass);
5498 let _defer = util::defer(|| {
5499 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5500 debug_assert!(askpass_delegate.is_some());
5501 });
5502 let response = client
5503 .request(proto::Pull {
5504 project_id: project_id.0,
5505 repository_id: id.to_proto(),
5506 askpass_id,
5507 rebase,
5508 branch_name: branch.as_ref().map(|b| b.to_string()),
5509 remote_name: remote.to_string(),
5510 })
5511 .await?;
5512
5513 Ok(RemoteCommandOutput {
5514 stdout: response.stdout,
5515 stderr: response.stderr,
5516 })
5517 }
5518 }
5519 })
5520 }
5521
    /// Queues a job that overwrites (or clears, when `content` is `None`) the
    /// index entry for `path`.
    ///
    /// Jobs are keyed by the written path so redundant writes to the same file
    /// coalesce. After the write completes, `hunk_staging_operation_count`
    /// (when provided) is recorded on the buffer's diff state so later code
    /// can tell which hunk-staging operations this write already reflects.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the on-disk executable bit in the index
                        // entry; a missing file or stat error falls back to
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Record on the buffer's diff state how many hunk-staging
                    // operations had occurred when this write was issued. The
                    // inner `Option` chain silently bails if the buffer or its
                    // diff state is gone — only a dropped git store is an error.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5600
5601 pub fn create_remote(
5602 &mut self,
5603 remote_name: String,
5604 remote_url: String,
5605 ) -> oneshot::Receiver<Result<()>> {
5606 let id = self.id;
5607 self.send_job(
5608 Some(format!("git remote add {remote_name} {remote_url}").into()),
5609 move |repo, _cx| async move {
5610 match repo {
5611 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5612 backend.create_remote(remote_name, remote_url).await
5613 }
5614 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5615 client
5616 .request(proto::GitCreateRemote {
5617 project_id: project_id.0,
5618 repository_id: id.to_proto(),
5619 remote_name,
5620 remote_url,
5621 })
5622 .await?;
5623
5624 Ok(())
5625 }
5626 }
5627 },
5628 )
5629 }
5630
5631 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5632 let id = self.id;
5633 self.send_job(
5634 Some(format!("git remove remote {remote_name}").into()),
5635 move |repo, _cx| async move {
5636 match repo {
5637 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5638 backend.remove_remote(remote_name).await
5639 }
5640 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5641 client
5642 .request(proto::GitRemoveRemote {
5643 project_id: project_id.0,
5644 repository_id: id.to_proto(),
5645 remote_name,
5646 })
5647 .await?;
5648
5649 Ok(())
5650 }
5651 }
5652 },
5653 )
5654 }
5655
5656 pub fn get_remotes(
5657 &mut self,
5658 branch_name: Option<String>,
5659 is_push: bool,
5660 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5661 let id = self.id;
5662 self.send_job(None, move |repo, _cx| async move {
5663 match repo {
5664 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5665 let remote = if let Some(branch_name) = branch_name {
5666 if is_push {
5667 backend.get_push_remote(branch_name).await?
5668 } else {
5669 backend.get_branch_remote(branch_name).await?
5670 }
5671 } else {
5672 None
5673 };
5674
5675 match remote {
5676 Some(remote) => Ok(vec![remote]),
5677 None => backend.get_all_remotes().await,
5678 }
5679 }
5680 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5681 let response = client
5682 .request(proto::GetRemotes {
5683 project_id: project_id.0,
5684 repository_id: id.to_proto(),
5685 branch_name,
5686 is_push,
5687 })
5688 .await?;
5689
5690 let remotes = response
5691 .remotes
5692 .into_iter()
5693 .map(|remotes| Remote {
5694 name: remotes.name.into(),
5695 })
5696 .collect();
5697
5698 Ok(remotes)
5699 }
5700 }
5701 })
5702 }
5703
5704 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5705 let id = self.id;
5706 self.send_job(None, move |repo, _| async move {
5707 match repo {
5708 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5709 backend.branches().await
5710 }
5711 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5712 let response = client
5713 .request(proto::GitGetBranches {
5714 project_id: project_id.0,
5715 repository_id: id.to_proto(),
5716 })
5717 .await?;
5718
5719 let branches = response
5720 .branches
5721 .into_iter()
5722 .map(|branch| proto_to_branch(&branch))
5723 .collect();
5724
5725 Ok(branches)
5726 }
5727 }
5728 })
5729 }
5730
5731 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5732 let id = self.id;
5733 self.send_job(None, move |repo, _| async move {
5734 match repo {
5735 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5736 backend.worktrees().await
5737 }
5738 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5739 let response = client
5740 .request(proto::GitGetWorktrees {
5741 project_id: project_id.0,
5742 repository_id: id.to_proto(),
5743 })
5744 .await?;
5745
5746 let worktrees = response
5747 .worktrees
5748 .into_iter()
5749 .map(|worktree| proto_to_worktree(&worktree))
5750 .collect();
5751
5752 Ok(worktrees)
5753 }
5754 }
5755 })
5756 }
5757
5758 pub fn create_worktree(
5759 &mut self,
5760 name: String,
5761 directory: PathBuf,
5762 commit: Option<String>,
5763 ) -> oneshot::Receiver<Result<()>> {
5764 let id = self.id;
5765 self.send_job(
5766 Some("git worktree add".into()),
5767 move |repo, _cx| async move {
5768 match repo {
5769 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5770 backend.create_worktree(name, directory, commit).await
5771 }
5772 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5773 client
5774 .request(proto::GitCreateWorktree {
5775 project_id: project_id.0,
5776 repository_id: id.to_proto(),
5777 name,
5778 directory: directory.to_string_lossy().to_string(),
5779 commit,
5780 })
5781 .await?;
5782
5783 Ok(())
5784 }
5785 }
5786 },
5787 )
5788 }
5789
5790 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5791 let id = self.id;
5792 self.send_job(
5793 Some(format!("git worktree remove: {}", path.display()).into()),
5794 move |repo, _cx| async move {
5795 match repo {
5796 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5797 backend.remove_worktree(path, force).await
5798 }
5799 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5800 client
5801 .request(proto::GitRemoveWorktree {
5802 project_id: project_id.0,
5803 repository_id: id.to_proto(),
5804 path: path.to_string_lossy().to_string(),
5805 force,
5806 })
5807 .await?;
5808
5809 Ok(())
5810 }
5811 }
5812 },
5813 )
5814 }
5815
5816 pub fn rename_worktree(
5817 &mut self,
5818 old_path: PathBuf,
5819 new_path: PathBuf,
5820 ) -> oneshot::Receiver<Result<()>> {
5821 let id = self.id;
5822 self.send_job(
5823 Some(format!("git worktree move: {}", old_path.display()).into()),
5824 move |repo, _cx| async move {
5825 match repo {
5826 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5827 backend.rename_worktree(old_path, new_path).await
5828 }
5829 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5830 client
5831 .request(proto::GitRenameWorktree {
5832 project_id: project_id.0,
5833 repository_id: id.to_proto(),
5834 old_path: old_path.to_string_lossy().to_string(),
5835 new_path: new_path.to_string_lossy().to_string(),
5836 })
5837 .await?;
5838
5839 Ok(())
5840 }
5841 }
5842 },
5843 )
5844 }
5845
5846 pub fn default_branch(
5847 &mut self,
5848 include_remote_name: bool,
5849 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5850 let id = self.id;
5851 self.send_job(None, move |repo, _| async move {
5852 match repo {
5853 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5854 backend.default_branch(include_remote_name).await
5855 }
5856 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5857 let response = client
5858 .request(proto::GetDefaultBranch {
5859 project_id: project_id.0,
5860 repository_id: id.to_proto(),
5861 })
5862 .await?;
5863
5864 anyhow::Ok(response.branch.map(SharedString::from))
5865 }
5866 }
5867 })
5868 }
5869
    /// Computes a tree-level diff (per-file status only, no text) for
    /// `diff_type`.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Decode proto entries; malformed ones (missing/invalid
                    // oid or undecodable path) are logged and dropped rather
                    // than failing the whole diff. Added entries carry no old
                    // oid, so only Modified/Deleted read `entry.oid`.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
5929
5930 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5931 let id = self.id;
5932 self.send_job(None, move |repo, _cx| async move {
5933 match repo {
5934 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5935 backend.diff(diff_type).await
5936 }
5937 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5938 let (proto_diff_type, merge_base_ref) = match &diff_type {
5939 DiffType::HeadToIndex => {
5940 (proto::git_diff::DiffType::HeadToIndex.into(), None)
5941 }
5942 DiffType::HeadToWorktree => {
5943 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
5944 }
5945 DiffType::MergeBase { base_ref } => (
5946 proto::git_diff::DiffType::MergeBase.into(),
5947 Some(base_ref.to_string()),
5948 ),
5949 };
5950 let response = client
5951 .request(proto::GitDiff {
5952 project_id: project_id.0,
5953 repository_id: id.to_proto(),
5954 diff_type: proto_diff_type,
5955 merge_base_ref,
5956 })
5957 .await?;
5958
5959 Ok(response.diff)
5960 }
5961 }
5962 })
5963 }
5964
    /// Creates branch `branch_name`, optionally from `base_branch`, and
    /// switches to it (mirrors `git switch -c`).
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `proto::GitCreateBranch` sends only
                    // `branch_name`, so for remote repositories `base_branch`
                    // is silently ignored and the branch is created from the
                    // current HEAD — confirm this is intentional.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5995
5996 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5997 let id = self.id;
5998 self.send_job(
5999 Some(format!("git switch {branch_name}").into()),
6000 move |repo, _cx| async move {
6001 match repo {
6002 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6003 backend.change_branch(branch_name).await
6004 }
6005 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6006 client
6007 .request(proto::GitChangeBranch {
6008 project_id: project_id.0,
6009 repository_id: id.to_proto(),
6010 branch_name,
6011 })
6012 .await?;
6013
6014 Ok(())
6015 }
6016 }
6017 },
6018 )
6019 }
6020
6021 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6022 let id = self.id;
6023 self.send_job(
6024 Some(format!("git branch -d {branch_name}").into()),
6025 move |repo, _cx| async move {
6026 match repo {
6027 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
6028 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6029 client
6030 .request(proto::GitDeleteBranch {
6031 project_id: project_id.0,
6032 repository_id: id.to_proto(),
6033 branch_name,
6034 })
6035 .await?;
6036
6037 Ok(())
6038 }
6039 }
6040 },
6041 )
6042 }
6043
6044 pub fn rename_branch(
6045 &mut self,
6046 branch: String,
6047 new_name: String,
6048 ) -> oneshot::Receiver<Result<()>> {
6049 let id = self.id;
6050 self.send_job(
6051 Some(format!("git branch -m {branch} {new_name}").into()),
6052 move |repo, _cx| async move {
6053 match repo {
6054 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6055 backend.rename_branch(branch, new_name).await
6056 }
6057 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6058 client
6059 .request(proto::GitRenameBranch {
6060 project_id: project_id.0,
6061 repository_id: id.to_proto(),
6062 branch,
6063 new_name,
6064 })
6065 .await?;
6066
6067 Ok(())
6068 }
6069 }
6070 },
6071 )
6072 }
6073
6074 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6075 let id = self.id;
6076 self.send_job(None, move |repo, _cx| async move {
6077 match repo {
6078 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6079 backend.check_for_pushed_commit().await
6080 }
6081 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6082 let response = client
6083 .request(proto::CheckForPushedCommits {
6084 project_id: project_id.0,
6085 repository_id: id.to_proto(),
6086 })
6087 .await?;
6088
6089 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6090
6091 Ok(branches)
6092 }
6093 }
6094 })
6095 }
6096
6097 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6098 self.send_job(None, |repo, _cx| async move {
6099 match repo {
6100 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6101 backend.checkpoint().await
6102 }
6103 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6104 }
6105 })
6106 }
6107
6108 pub fn restore_checkpoint(
6109 &mut self,
6110 checkpoint: GitRepositoryCheckpoint,
6111 ) -> oneshot::Receiver<Result<()>> {
6112 self.send_job(None, move |repo, _cx| async move {
6113 match repo {
6114 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6115 backend.restore_checkpoint(checkpoint).await
6116 }
6117 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6118 }
6119 })
6120 }
6121
    /// Applies an `UpdateRepository` message from the upstream (host) project
    /// to this downstream replica's snapshot, emitting a change event for each
    /// part of the snapshot that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        // Compare before overwriting so the event only fires on real changes.
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to deserialize are silently skipped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Batch status removals and insertions into a single tree edit;
        // entries with undecodable paths/statuses are logged and dropped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only the final message of a multi-part update carries the scan id.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6205
6206 pub fn compare_checkpoints(
6207 &mut self,
6208 left: GitRepositoryCheckpoint,
6209 right: GitRepositoryCheckpoint,
6210 ) -> oneshot::Receiver<Result<bool>> {
6211 self.send_job(None, move |repo, _cx| async move {
6212 match repo {
6213 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6214 backend.compare_checkpoints(left, right).await
6215 }
6216 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6217 }
6218 })
6219 }
6220
6221 pub fn diff_checkpoints(
6222 &mut self,
6223 base_checkpoint: GitRepositoryCheckpoint,
6224 target_checkpoint: GitRepositoryCheckpoint,
6225 ) -> oneshot::Receiver<Result<String>> {
6226 self.send_job(None, move |repo, _cx| async move {
6227 match repo {
6228 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6229 backend
6230 .diff_checkpoints(base_checkpoint, target_checkpoint)
6231 .await
6232 }
6233 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6234 }
6235 })
6236 }
6237
    /// Drops pending per-file git operations that are no longer running,
    /// keeping only entries that still have an op in flight, and notifies
    /// observers if the set changed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree retaining only paths with at least one running op.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *previous* pending ops (the
            // field is cloned before the assignment below) — confirm
            // subscribers expect the old set rather than `updated`.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6263
    /// Schedules a full git status rescan, keyed so overlapping scan requests
    /// collapse into one. On completion, the fresh snapshot replaces the
    /// current one and — when `updates_tx` is provided — is forwarded to
    /// downstream (collab) clients.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // If the repository entity is gone there is nothing to scan.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                // A full rescan supersedes any queued partial refreshes.
                let compute_snapshot = this.update(&mut cx, |this, _| {
                    this.paths_needing_status_update.clear();
                    compute_snapshot(
                        this.id,
                        this.work_directory_abs_path.clone(),
                        this.snapshot.clone(),
                        backend.clone(),
                    )
                });
                // Run the (potentially slow) scan off the main thread.
                let (snapshot, events) = cx.background_spawn(compute_snapshot).await?;
                this.update(&mut cx, |this, cx| {
                    this.snapshot = snapshot.clone();
                    this.clear_pending_ops(cx);
                    for event in events {
                        cx.emit(event);
                    }
                });
                // Best effort: a closed downstream channel is not an error.
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6308
    /// Spawns the job loop that services git jobs for a local repository once
    /// `state` (the backend handle) resolves, returning the sender used to
    /// enqueue jobs.
    ///
    /// Jobs run strictly one at a time. A keyed job is skipped when a newer
    /// job with the same key is already waiting behind it, so only the latest
    /// of a burst of identical requests actually runs.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            // Register additional git hosting providers against this backend
            // once it is available (if a global registry exists).
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so duplicate keys become
                // visible before we execute the next job.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a newer one with the same key is
                    // still queued behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6354
    /// Spawns the job loop that services git jobs for a remote (RPC-backed)
    /// repository, returning the sender used to enqueue jobs.
    ///
    /// Same scheduling rules as `spawn_local_git_worker`: jobs run one at a
    /// time, and a keyed job is skipped when a newer job with the same key is
    /// already queued behind it.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so duplicate keys become
                // visible before we execute the next job.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a newer one with the same key is
                    // still queued behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6390
    /// Loads the staged (index) text of `repo_path`, or `None` when the path
    /// has no index entry.
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        })
        ;
        // Flatten the job-channel receiver into a Task (outer error = job
        // dropped/canceled, inner = git failure).
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6415
    /// Loads both the committed (HEAD) and staged (index) texts for
    /// `repo_path`, collapsed into a single `DiffBasesChange`.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // When index and HEAD agree, a single shared base avoids
                    // carrying the same text twice.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The host reports whether index and HEAD matched so we
                    // can mirror the same SetBoth/SetEach distinction.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        // Flatten the job-channel receiver into a Task (outer error = job
        // dropped/canceled, inner = git failure).
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6461
    /// Loads the content of the blob object identified by `oid`.
    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
        let repository_id = self.snapshot.id;
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_blob_content(oid).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetBlobContent {
                            project_id: project_id.to_proto(),
                            repository_id: repository_id.0,
                            oid: oid.to_string(),
                        })
                        .await?;
                    Ok(response.content)
                }
            }
        });
        // Flatten the job-channel receiver into a Task (outer error = job
        // dropped/canceled, inner = git failure).
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6483
    /// Records that `paths` may have changed on disk and schedules a keyed
    /// background job to refresh their git statuses, emitting
    /// `RepositoryEvent`s and optionally forwarding the updated snapshot
    /// downstream via `updates_tx`.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        // Keyed job: rapid-fire change notifications coalesce into a single
        // refresh instead of queueing one job per call.
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Drain every batch of paths accumulated since this job was
                // scheduled, along with the snapshot to diff against.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        // Without a HEAD commit there is nothing to diff
                        // against, so substitute an empty diff stat.
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Record an insert for every entry whose status or
                        // diff stat differs from the previous snapshot.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                // Unchanged relative to the previous snapshot.
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths we were notified about that no longer appear
                        // in the fresh status output get removed from the
                        // snapshot (if they were present before).
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    // Propagate the refreshed snapshot to any downstream
                    // (e.g. remote collaborators), best-effort.
                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6596
6597 /// currently running git command and when it started
6598 pub fn current_job(&self) -> Option<JobInfo> {
6599 self.active_jobs.values().next().cloned()
6600 }
6601
6602 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
6603 self.send_job(None, |_, _| async {})
6604 }
6605
    /// Runs `f` while tracking a pending git op (with `git_status`) for each
    /// of `paths`, so in-flight state can be surfaced. Once `f` resolves, all
    /// of the recorded ops are marked `Finished`, `Skipped` (on cancellation),
    /// or `Error`.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        // Register one `Running` op per path before the job starts.
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                // A canceled oneshot means the job was dropped rather than
                // having failed, so report it as skipped, not as an error.
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    // Re-read each path's ops: they may have been updated
                    // while the job was running.
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6645
6646 fn new_pending_ops_for_paths(
6647 &mut self,
6648 paths: Vec<RepoPath>,
6649 git_status: pending_op::GitStatus,
6650 ) -> Vec<(PendingOpId, RepoPath)> {
6651 let mut edits = Vec::with_capacity(paths.len());
6652 let mut ids = Vec::with_capacity(paths.len());
6653 for path in paths {
6654 let mut ops = self
6655 .pending_ops
6656 .get(&PathKey(path.as_ref().clone()), ())
6657 .cloned()
6658 .unwrap_or_else(|| PendingOps::new(&path));
6659 let id = ops.max_id() + 1;
6660 ops.ops.push(PendingOp {
6661 id,
6662 git_status,
6663 job_status: pending_op::JobStatus::Running,
6664 });
6665 edits.push(sum_tree::Edit::Insert(ops));
6666 ids.push((id, path));
6667 }
6668 self.pending_ops.edit(edits, ());
6669 ids
6670 }
6671 pub fn default_remote_url(&self) -> Option<String> {
6672 self.remote_upstream_url
6673 .clone()
6674 .or(self.remote_origin_url.clone())
6675 }
6676}
6677
6678fn get_permalink_in_rust_registry_src(
6679 provider_registry: Arc<GitHostingProviderRegistry>,
6680 path: PathBuf,
6681 selection: Range<u32>,
6682) -> Result<url::Url> {
6683 #[derive(Deserialize)]
6684 struct CargoVcsGit {
6685 sha1: String,
6686 }
6687
6688 #[derive(Deserialize)]
6689 struct CargoVcsInfo {
6690 git: CargoVcsGit,
6691 path_in_vcs: String,
6692 }
6693
6694 #[derive(Deserialize)]
6695 struct CargoPackage {
6696 repository: String,
6697 }
6698
6699 #[derive(Deserialize)]
6700 struct CargoToml {
6701 package: CargoPackage,
6702 }
6703
6704 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
6705 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
6706 Some((dir, json))
6707 }) else {
6708 bail!("No .cargo_vcs_info.json found in parent directories")
6709 };
6710 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
6711 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
6712 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
6713 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
6714 .context("parsing package.repository field of manifest")?;
6715 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
6716 let permalink = provider.build_permalink(
6717 remote,
6718 BuildPermalinkParams::new(
6719 &cargo_vcs_info.git.sha1,
6720 &RepoPath::from_rel_path(
6721 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6722 ),
6723 Some(selection),
6724 ),
6725 );
6726 Ok(permalink)
6727}
6728
6729fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6730 let Some(blame) = blame else {
6731 return proto::BlameBufferResponse {
6732 blame_response: None,
6733 };
6734 };
6735
6736 let entries = blame
6737 .entries
6738 .into_iter()
6739 .map(|entry| proto::BlameEntry {
6740 sha: entry.sha.as_bytes().into(),
6741 start_line: entry.range.start,
6742 end_line: entry.range.end,
6743 original_line_number: entry.original_line_number,
6744 author: entry.author,
6745 author_mail: entry.author_mail,
6746 author_time: entry.author_time,
6747 author_tz: entry.author_tz,
6748 committer: entry.committer_name,
6749 committer_mail: entry.committer_email,
6750 committer_time: entry.committer_time,
6751 committer_tz: entry.committer_tz,
6752 summary: entry.summary,
6753 previous: entry.previous,
6754 filename: entry.filename,
6755 })
6756 .collect::<Vec<_>>();
6757
6758 let messages = blame
6759 .messages
6760 .into_iter()
6761 .map(|(oid, message)| proto::CommitMessage {
6762 oid: oid.as_bytes().into(),
6763 message,
6764 })
6765 .collect::<Vec<_>>();
6766
6767 proto::BlameBufferResponse {
6768 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6769 }
6770}
6771
6772fn deserialize_blame_buffer_response(
6773 response: proto::BlameBufferResponse,
6774) -> Option<git::blame::Blame> {
6775 let response = response.blame_response?;
6776 let entries = response
6777 .entries
6778 .into_iter()
6779 .filter_map(|entry| {
6780 Some(git::blame::BlameEntry {
6781 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6782 range: entry.start_line..entry.end_line,
6783 original_line_number: entry.original_line_number,
6784 committer_name: entry.committer,
6785 committer_time: entry.committer_time,
6786 committer_tz: entry.committer_tz,
6787 committer_email: entry.committer_mail,
6788 author: entry.author,
6789 author_mail: entry.author_mail,
6790 author_time: entry.author_time,
6791 author_tz: entry.author_tz,
6792 summary: entry.summary,
6793 previous: entry.previous,
6794 filename: entry.filename,
6795 })
6796 })
6797 .collect::<Vec<_>>();
6798
6799 let messages = response
6800 .messages
6801 .into_iter()
6802 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6803 .collect::<HashMap<_, _>>();
6804
6805 Some(Blame { entries, messages })
6806}
6807
6808fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6809 proto::Branch {
6810 is_head: branch.is_head,
6811 ref_name: branch.ref_name.to_string(),
6812 unix_timestamp: branch
6813 .most_recent_commit
6814 .as_ref()
6815 .map(|commit| commit.commit_timestamp as u64),
6816 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6817 ref_name: upstream.ref_name.to_string(),
6818 tracking: upstream
6819 .tracking
6820 .status()
6821 .map(|upstream| proto::UpstreamTracking {
6822 ahead: upstream.ahead as u64,
6823 behind: upstream.behind as u64,
6824 }),
6825 }),
6826 most_recent_commit: branch
6827 .most_recent_commit
6828 .as_ref()
6829 .map(|commit| proto::CommitSummary {
6830 sha: commit.sha.to_string(),
6831 subject: commit.subject.to_string(),
6832 commit_timestamp: commit.commit_timestamp,
6833 author_name: commit.author_name.to_string(),
6834 }),
6835 }
6836}
6837
6838fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6839 proto::Worktree {
6840 path: worktree.path.to_string_lossy().to_string(),
6841 ref_name: worktree.ref_name.to_string(),
6842 sha: worktree.sha.to_string(),
6843 }
6844}
6845
6846fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6847 git::repository::Worktree {
6848 path: PathBuf::from(proto.path.clone()),
6849 ref_name: proto.ref_name.clone().into(),
6850 sha: proto.sha.clone().into(),
6851 }
6852}
6853
6854fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6855 git::repository::Branch {
6856 is_head: proto.is_head,
6857 ref_name: proto.ref_name.clone().into(),
6858 upstream: proto
6859 .upstream
6860 .as_ref()
6861 .map(|upstream| git::repository::Upstream {
6862 ref_name: upstream.ref_name.to_string().into(),
6863 tracking: upstream
6864 .tracking
6865 .as_ref()
6866 .map(|tracking| {
6867 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6868 ahead: tracking.ahead as u32,
6869 behind: tracking.behind as u32,
6870 })
6871 })
6872 .unwrap_or(git::repository::UpstreamTracking::Gone),
6873 }),
6874 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6875 git::repository::CommitSummary {
6876 sha: commit.sha.to_string().into(),
6877 subject: commit.subject.to_string().into(),
6878 commit_timestamp: commit.commit_timestamp,
6879 author_name: commit.author_name.to_string().into(),
6880 has_parent: true,
6881 }
6882 }),
6883 }
6884}
6885
6886fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6887 proto::GitCommitDetails {
6888 sha: commit.sha.to_string(),
6889 message: commit.message.to_string(),
6890 commit_timestamp: commit.commit_timestamp,
6891 author_email: commit.author_email.to_string(),
6892 author_name: commit.author_name.to_string(),
6893 }
6894}
6895
6896fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6897 CommitDetails {
6898 sha: proto.sha.clone().into(),
6899 message: proto.message.clone().into(),
6900 commit_timestamp: proto.commit_timestamp,
6901 author_email: proto.author_email.clone().into(),
6902 author_name: proto.author_name.clone().into(),
6903 }
6904}
6905
/// Recomputes a full `RepositorySnapshot` from the git backend, comparing it
/// against `prev_snapshot` to produce the list of `RepositoryEvent`s
/// describing what changed.
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Useful when branch is None in detached head state
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    // Without a HEAD commit there is nothing to diff against, so substitute
    // an empty diff stat.
    let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> = if head_commit.is_some() {
        backend.diff_stat(&[])
    } else {
        future::ready(Ok(status::GitDiffStat {
            entries: Arc::default(),
        }))
        .boxed()
    };
    // Statuses, diff stats, and the worktree list are independent queries;
    // run them concurrently.
    let (statuses, diff_stats, all_worktrees) = futures::future::try_join3(
        backend.status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )]),
        diff_stat_future,
        backend.worktrees(),
    )
    .await?;

    // Exclude the primary working directory: only linked worktrees are kept.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let stash_entries = backend.stash_entries().await?;
    // Collect conflicted paths as a side effect of building the status tree.
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );
    let mut merge_details = prev_snapshot.merge;
    let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
    log::debug!("new merge details: {merge_details:?}");

    // Emit one event per category of change relative to the previous snapshot.
    if conflicts_changed || statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged)
    }

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    if *linked_worktrees != *prev_snapshot.linked_worktrees {
        events.push(RepositoryEvent::GitWorktreeListChanged);
    }

    let remote_origin_url = backend.remote_url("origin").await;
    let remote_upstream_url = backend.remote_url("upstream").await;

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        original_repo_abs_path: prev_snapshot.original_repo_abs_path,
        path_style: prev_snapshot.path_style,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
        linked_worktrees,
    };

    Ok((snapshot, events))
}
6998
/// Deserializes a `FileStatus` from its wire representation.
///
/// When the structured `status` variant is present it takes precedence;
/// otherwise the legacy `simple_status` code is decoded as a fallback.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Fallback path: no structured variant, decode the simple status code.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both sides of the merge with the same fallible mapping,
            // deferring error propagation until after the array map.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Same pattern as above, for index and worktree status codes.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7084
7085fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7086 use proto::git_file_status::{Tracked, Unmerged, Variant};
7087
7088 let variant = match status {
7089 FileStatus::Untracked => Variant::Untracked(Default::default()),
7090 FileStatus::Ignored => Variant::Ignored(Default::default()),
7091 FileStatus::Unmerged(UnmergedStatus {
7092 first_head,
7093 second_head,
7094 }) => Variant::Unmerged(Unmerged {
7095 first_head: unmerged_status_to_proto(first_head),
7096 second_head: unmerged_status_to_proto(second_head),
7097 }),
7098 FileStatus::Tracked(TrackedStatus {
7099 index_status,
7100 worktree_status,
7101 }) => Variant::Tracked(Tracked {
7102 index_status: tracked_status_to_proto(index_status),
7103 worktree_status: tracked_status_to_proto(worktree_status),
7104 }),
7105 };
7106 proto::GitFileStatus {
7107 variant: Some(variant),
7108 }
7109}
7110
7111fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7112 match code {
7113 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7114 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7115 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7116 }
7117}
7118
7119fn tracked_status_to_proto(code: StatusCode) -> i32 {
7120 match code {
7121 StatusCode::Added => proto::GitStatus::Added as _,
7122 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7123 StatusCode::Modified => proto::GitStatus::Modified as _,
7124 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7125 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7126 StatusCode::Copied => proto::GitStatus::Copied as _,
7127 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7128 }
7129}