1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for a project's git state: tracks every repository, the
/// per-buffer diff/conflict state, and which repository is currently active.
pub struct GitStore {
    /// Local vs. remote (collaboration) backing state.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All known repositories, keyed by id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// The worktrees each repository is associated with.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff loads, shared so concurrent requests await one task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diffs that have been shared with downstream peers, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// The diffs for a single buffer that have been shared with a peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: the buffer's diffs, its conflict set, and the base
/// texts those diffs are computed against.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs against arbitrary commits, keyed by OID (`None` = no base commit).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders notified when the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    /// Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    /// Cached base texts for diffs against specific commits.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// A change to the base texts (index and/or head) a buffer is diffed against.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the head text changed.
    SetHead(Option<String>),
    /// Index and head changed to (possibly) different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and head changed to the same value.
    SetBoth(Option<String>),
}
155
/// Which kind of diff is being loaded for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer contents vs. the staged (index) text.
    Unstaged,
    /// Buffer contents vs. the committed text.
    Uncommitted,
    /// Buffer contents vs. a specific commit (`None` = no base content).
    SinceOid(Option<git::Oid>),
}
162
/// Backing state distinguishing a locally-hosted store from one that mirrors
/// an upstream (remote) project.
enum GitStoreState {
    Local {
        /// Source of ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is shared with downstream clients.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this remote project is re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// A repository change queued for forwarding to downstream clients.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
181
/// Connection state for streaming repository updates to a downstream client.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    /// Queue of updates consumed by `_task` and forwarded to `client`.
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// A checkpoint of every repository in the store, keyed by each repository's
/// working-directory absolute path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// The git status of a single path within a repository, plus optional
/// added/deleted line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    // Entries are summarized by their path plus an aggregated git status.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}
254
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    // Entries are keyed (and therefore ordered) by repo path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Identifier for a [`Repository`] within a [`GitStore`].
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
265
/// Details of an in-progress merge, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    /// For each conflicted path, the merge heads involved.
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    /// The merge message, when one is available.
    pub message: Option<SharedString>,
}
271
/// Cached commit data for the graph view, which may still be loading.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// An immutable snapshot of a repository's observable state (branch, head,
/// statuses, stash, remotes, linked worktrees).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Per-path git statuses, ordered by path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    /// Incremented on each status scan.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Git worktrees linked to this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
298
/// Identifier for a queued or running git job within a repository.
type JobId = u64;
300
/// Metadata about a running git job: when it started and what it is doing.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
306
/// Background handler servicing requests for per-commit graph data.
struct GraphCommitDataHandler {
    /// Keeps the handler's background loop alive.
    _task: Task<()>,
    /// Channel used to request data for a specific commit.
    commit_data_request: smol::channel::Sender<Oid>,
}
311
/// Lifecycle state of the graph-commit-data handler.
enum GraphCommitHandlerState {
    /// The handler is being started.
    Starting,
    /// The handler is running and accepting requests.
    Open(GraphCommitDataHandler),
    /// The handler has shut down.
    Closed,
}
317
/// The initial batch of commit data for the git graph, plus the task that
/// fetches it.
pub struct InitialGitGraphData {
    /// Keeps the in-flight fetch alive.
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index of each commit within `commit_data`, by OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
324
/// A borrowed view of graph commit data together with its loading status.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// A single git repository tracked by the [`GitStore`], combining its latest
/// [`RepositorySnapshot`] with job scheduling and cached commit-graph data.
pub struct Repository {
    this: WeakEntity<Self>,
    /// Latest known state of the underlying repository.
    snapshot: RepositorySnapshot,
    /// Buffer used for composing the commit message, if open.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue through which git jobs are submitted for this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Jobs currently running, keyed by id.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    /// Source of the next job id.
    job_id: JobId,
    /// Askpass delegates for in-flight credential prompts, keyed by id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Shared task resolving the backend state (local backend or remote client).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    /// Cached initial commit-graph data per (source, order) request.
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    /// Per-commit data cache, including entries still loading.
    commit_data: HashMap<Oid, CommitDataState>,
}
350
/// A `Repository` dereferences to its latest snapshot, so read-only snapshot
/// accessors can be called directly on the repository.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
358
/// Backend state for a repository hosted on the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    /// Handle to the underlying git implementation.
    pub backend: Arc<dyn GitRepository>,
    /// Shell environment resolved for the working directory.
    pub environment: Arc<HashMap<String, String>>,
}
365
impl LocalRepositoryState {
    /// Opens the git backend for a local repository.
    ///
    /// Resolves the working directory's shell environment, locates a system
    /// `git` binary using that environment's `PATH`, and opens the repository
    /// on a background thread. Marks the backend trusted per `is_trusted`.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        // Fall back to an empty environment (with a logged error) rather than
        // failing to open the repository.
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found via the resolved PATH; otherwise
                    // fall back to this process's own PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
408
/// Backend state for a repository accessed through a remote (upstream) client.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
414
/// A repository's backend: a local git implementation or a remote client.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
420
/// Progress events for loading git graph data.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// More commits are available; carries the current count.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
427
/// Fine-grained change notifications emitted by a [`Repository`].
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// Graph-loading progress for a particular (source, order) request.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
437
/// Event signaling that a repository's set of git jobs has changed.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
440
/// Store-level notifications emitted by the [`GitStore`].
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// Writing a new index text to disk failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
452
// `Repository` emits both detailed repository events and job-queue updates;
// `GitStore` emits aggregated store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
456
/// A unit of git work, run with access to the repository's backend state.
pub struct GitJob {
    /// The work itself; receives the resolved repository state when it runs.
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    /// Optional key classifying the job (see [`GitJobKey`]).
    key: Option<GitJobKey>,
}
461
/// Keys identifying classes of git jobs. Matching-key handling is done by the
/// job queue (defined elsewhere in this file).
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
471 pub fn local(
472 worktree_store: &Entity<WorktreeStore>,
473 buffer_store: Entity<BufferStore>,
474 environment: Entity<ProjectEnvironment>,
475 fs: Arc<dyn Fs>,
476 cx: &mut Context<Self>,
477 ) -> Self {
478 Self::new(
479 worktree_store.clone(),
480 buffer_store,
481 GitStoreState::Local {
482 next_repository_id: Arc::new(AtomicU64::new(1)),
483 downstream: None,
484 project_environment: environment,
485 fs,
486 },
487 cx,
488 )
489 }
490
491 pub fn remote(
492 worktree_store: &Entity<WorktreeStore>,
493 buffer_store: Entity<BufferStore>,
494 upstream_client: AnyProtoClient,
495 project_id: u64,
496 cx: &mut Context<Self>,
497 ) -> Self {
498 Self::new(
499 worktree_store.clone(),
500 buffer_store,
501 GitStoreState::Remote {
502 upstream_client,
503 upstream_project_id: project_id,
504 downstream: None,
505 },
506 cx,
507 )
508 }
509
510 fn new(
511 worktree_store: Entity<WorktreeStore>,
512 buffer_store: Entity<BufferStore>,
513 state: GitStoreState,
514 cx: &mut Context<Self>,
515 ) -> Self {
516 let mut _subscriptions = vec![
517 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
518 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
519 ];
520
521 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
522 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
523 }
524
525 GitStore {
526 state,
527 buffer_store,
528 worktree_store,
529 repositories: HashMap::default(),
530 worktree_ids: HashMap::default(),
531 active_repo_id: None,
532 _subscriptions,
533 loading_diffs: HashMap::default(),
534 shared_diffs: HashMap::default(),
535 diffs: HashMap::default(),
536 }
537 }
538
    /// Registers all git-related RPC handlers on the given client.
    pub fn init(client: &AnyProtoClient) {
        // Branch and remote management.
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        // Sync and staging operations.
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        // Diff and blame queries.
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        // Repository lifecycle and git worktrees.
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
586
587 pub fn is_local(&self) -> bool {
588 matches!(self.state, GitStoreState::Local { .. })
589 }
590 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
591 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
592 let id = repo.read(cx).id;
593 if self.active_repo_id != Some(id) {
594 self.active_repo_id = Some(id);
595 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
596 }
597 }
598 }
599
    /// Begins sharing this store's repositories with a downstream client.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Forward the full current state of each repository.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // Queue an initial snapshot per repository, then spawn a task
                // that streams updates (diffed against the last snapshot we
                // sent) to the client until the channel closes.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send only the delta.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // First sighting: send the full state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The stream ended (sender dropped or a send failed):
                        // stop considering the store shared.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
680
681 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
682 match &mut self.state {
683 GitStoreState::Local {
684 downstream: downstream_client,
685 ..
686 } => {
687 downstream_client.take();
688 }
689 GitStoreState::Remote {
690 downstream: downstream_client,
691 ..
692 } => {
693 downstream_client.take();
694 }
695 }
696 self.shared_diffs.clear();
697 }
698
    /// Drops all shared-diff bookkeeping for a peer (e.g. when they disconnect).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
702
703 pub fn active_repository(&self) -> Option<Entity<Repository>> {
704 self.active_repo_id
705 .as_ref()
706 .map(|id| self.repositories[id].clone())
707 }
708
709 pub fn open_unstaged_diff(
710 &mut self,
711 buffer: Entity<Buffer>,
712 cx: &mut Context<Self>,
713 ) -> Task<Result<Entity<BufferDiff>>> {
714 let buffer_id = buffer.read(cx).remote_id();
715 if let Some(diff_state) = self.diffs.get(&buffer_id)
716 && let Some(unstaged_diff) = diff_state
717 .read(cx)
718 .unstaged_diff
719 .as_ref()
720 .and_then(|weak| weak.upgrade())
721 {
722 if let Some(task) =
723 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
724 {
725 return cx.background_executor().spawn(async move {
726 task.await;
727 Ok(unstaged_diff)
728 });
729 }
730 return Task::ready(Ok(unstaged_diff));
731 }
732
733 let Some((repo, repo_path)) =
734 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
735 else {
736 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
737 };
738
739 let task = self
740 .loading_diffs
741 .entry((buffer_id, DiffKind::Unstaged))
742 .or_insert_with(|| {
743 let staged_text = repo.update(cx, |repo, cx| {
744 repo.load_staged_text(buffer_id, repo_path, cx)
745 });
746 cx.spawn(async move |this, cx| {
747 Self::open_diff_internal(
748 this,
749 DiffKind::Unstaged,
750 staged_text.await.map(DiffBasesChange::SetIndex),
751 buffer,
752 cx,
753 )
754 .await
755 .map_err(Arc::new)
756 })
757 .shared()
758 })
759 .clone();
760
761 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
762 }
763
    /// Returns a diff of the buffer against the given commit, creating and
    /// caching it on first request. With `oid == None` the diff has no base
    /// content.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: reuse a live diff, waiting out any in-flight recalculation.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // If a load for this (buffer, oid) pair is already running, share it.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text for the commit, if there is one.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff acts as this diff's secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Register the diff and cache the base text.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
864
865 #[ztracing::instrument(skip_all)]
866 pub fn open_uncommitted_diff(
867 &mut self,
868 buffer: Entity<Buffer>,
869 cx: &mut Context<Self>,
870 ) -> Task<Result<Entity<BufferDiff>>> {
871 let buffer_id = buffer.read(cx).remote_id();
872
873 if let Some(diff_state) = self.diffs.get(&buffer_id)
874 && let Some(uncommitted_diff) = diff_state
875 .read(cx)
876 .uncommitted_diff
877 .as_ref()
878 .and_then(|weak| weak.upgrade())
879 {
880 if let Some(task) =
881 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
882 {
883 return cx.background_executor().spawn(async move {
884 task.await;
885 Ok(uncommitted_diff)
886 });
887 }
888 return Task::ready(Ok(uncommitted_diff));
889 }
890
891 let Some((repo, repo_path)) =
892 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
893 else {
894 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
895 };
896
897 let task = self
898 .loading_diffs
899 .entry((buffer_id, DiffKind::Uncommitted))
900 .or_insert_with(|| {
901 let changes = repo.update(cx, |repo, cx| {
902 repo.load_committed_text(buffer_id, repo_path, cx)
903 });
904
905 // todo(lw): hot foreground spawn
906 cx.spawn(async move |this, cx| {
907 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
908 .await
909 .map_err(Arc::new)
910 })
911 .shared()
912 })
913 .clone();
914
915 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
916 }
917
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`: installs
    /// a new `BufferDiff` into the buffer's git state and kicks off the
    /// initial recalculation, resolving once it completes.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // On failure, clear the loading entry so a retry is possible.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Apply the new base texts and wait for the resulting
                // recalculation (if one was started) before resolving.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
992
993 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
994 let diff_state = self.diffs.get(&buffer_id)?;
995 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
996 }
997
998 pub fn get_uncommitted_diff(
999 &self,
1000 buffer_id: BufferId,
1001 cx: &App,
1002 ) -> Option<Entity<BufferDiff>> {
1003 let diff_state = self.diffs.get(&buffer_id)?;
1004 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1005 }
1006
1007 pub fn get_diff_since_oid(
1008 &self,
1009 buffer_id: BufferId,
1010 oid: Option<git::Oid>,
1011 cx: &App,
1012 ) -> Option<Entity<BufferDiff>> {
1013 let diff_state = self.diffs.get(&buffer_id)?;
1014 diff_state.read(cx).oid_diff(oid)
1015 }
1016
1017 pub fn open_conflict_set(
1018 &mut self,
1019 buffer: Entity<Buffer>,
1020 cx: &mut Context<Self>,
1021 ) -> Entity<ConflictSet> {
1022 log::debug!("open conflict set");
1023 let buffer_id = buffer.read(cx).remote_id();
1024
1025 if let Some(git_state) = self.diffs.get(&buffer_id)
1026 && let Some(conflict_set) = git_state
1027 .read(cx)
1028 .conflict_set
1029 .as_ref()
1030 .and_then(|weak| weak.upgrade())
1031 {
1032 let conflict_set = conflict_set;
1033 let buffer_snapshot = buffer.read(cx).text_snapshot();
1034
1035 git_state.update(cx, |state, cx| {
1036 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1037 });
1038
1039 return conflict_set;
1040 }
1041
1042 let is_unmerged = self
1043 .repository_and_path_for_buffer_id(buffer_id, cx)
1044 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1045 let git_store = cx.weak_entity();
1046 let buffer_git_state = self
1047 .diffs
1048 .entry(buffer_id)
1049 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1050 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1051
1052 self._subscriptions
1053 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1054 cx.emit(GitStoreEvent::ConflictsUpdated);
1055 }));
1056
1057 buffer_git_state.update(cx, |state, cx| {
1058 state.conflict_set = Some(conflict_set.downgrade());
1059 let buffer_snapshot = buffer.read(cx).text_snapshot();
1060 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1061 });
1062
1063 conflict_set
1064 }
1065
1066 pub fn project_path_git_status(
1067 &self,
1068 project_path: &ProjectPath,
1069 cx: &App,
1070 ) -> Option<FileStatus> {
1071 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1072 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1073 }
1074
    /// Captures a checkpoint of every repository in the store.
    ///
    /// The resulting map is keyed by each repository's work-directory absolute
    /// path. All checkpoints run concurrently; the task fails if any single
    /// repository checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` resolves to a nested result; `map(|c| c?)`
                // flattens it so `try_join_all` below sees a single `Result`.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Paths and checkpoints were pushed in lockstep above, so the
                // zip pairs each checkpoint with its repository's work dir.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1095
    /// Restores each repository to the state recorded in the given checkpoint.
    ///
    /// Checkpoint entries whose work directory no longer matches a known
    /// repository are silently skipped. Restores run concurrently; the task
    /// fails if any individual restore fails.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index the live repositories by work directory so checkpoint entries
        // can be matched back to them.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // `restore.await` yields a nested result; `?` flattens it.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1121
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Returns `false` immediately if `left` contains a repository for which
    /// `right` has no matching checkpoint. Note the asymmetry: entries present
    /// only in `right`, or whose repository no longer exists in the store, are
    /// not compared at all.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        // Index the live repositories by work directory for matching below.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    // `compare.await` yields a nested result; `?` flattens it.
                    tasks.push(async move { compare.await? });
                }
            } else {
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            // Equal only if every pairwise comparison reported equality.
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1160
    /// Blames a buffer.
    ///
    /// When `version` is provided, the buffer's content at that version is
    /// blamed; otherwise its current content is used. For remote projects the
    /// request is proxied to the host, which may report no blame information
    /// (`Ok(None)`).
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot everything we need from the buffer before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold the repository weakly so the spawned task doesn't keep it alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                // Local repository: blame directly against the git backend.
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                // Remote repository: ask the host over RPC.
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1207
1208 pub fn file_history(
1209 &self,
1210 repo: &Entity<Repository>,
1211 path: RepoPath,
1212 cx: &mut App,
1213 ) -> Task<Result<git::repository::FileHistory>> {
1214 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1215
1216 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1217 }
1218
1219 pub fn file_history_paginated(
1220 &self,
1221 repo: &Entity<Repository>,
1222 path: RepoPath,
1223 skip: usize,
1224 limit: Option<usize>,
1225 cx: &mut App,
1226 ) -> Task<Result<git::repository::FileHistory>> {
1227 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1228
1229 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1230 }
1231
    /// Builds a web permalink to the given line range of a buffer on its git
    /// hosting provider.
    ///
    /// Falls back to constructing a permalink from Cargo registry metadata for
    /// Rust sources outside any repository. For remote projects, the permalink
    /// is computed by the host over RPC.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Local repository: resolve remote URL and HEAD, then let
                    // the hosting provider construct the permalink.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Remote repository: the host computes the permalink.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1316
1317 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1318 match &self.state {
1319 GitStoreState::Local {
1320 downstream: downstream_client,
1321 ..
1322 } => downstream_client
1323 .as_ref()
1324 .map(|state| (state.client.clone(), state.project_id)),
1325 GitStoreState::Remote {
1326 downstream: downstream_client,
1327 ..
1328 } => downstream_client.clone(),
1329 }
1330 }
1331
1332 fn upstream_client(&self) -> Option<AnyProtoClient> {
1333 match &self.state {
1334 GitStoreState::Local { .. } => None,
1335 GitStoreState::Remote {
1336 upstream_client, ..
1337 } => Some(upstream_client.clone()),
1338 }
1339 }
1340
    /// Routes worktree-store events to git bookkeeping. Only meaningful for
    /// local projects; remote stores return immediately.
    ///
    /// - Updated entries: recompute which repositories the changed paths
    ///   belong to and notify each repository.
    /// - Updated git repositories: create/update/remove `Repository` entities.
    /// - Worktree removed: drop repositories that no longer belong to any
    ///   worktree, reassigning the active repository if needed.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            // NOTE(review): the result of `repo.update` appears
                            // to be discarded here — confirm failures (e.g. app
                            // shutdown) are safe to ignore.
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees (e.g. single-file ones) don't contribute
                // repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Detach the worktree from every repository; collect the
                // repositories left with no worktree at all.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell any downstream peers the repository is gone.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining one (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to a repository's change event: refreshes conflict tracking for
    /// every open buffer that belongs to that repository, then re-emits the
    /// event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // `conflict_set` is held weakly; `update` fails (and we
                        // bail via `?`) if it has been dropped.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse markers when the unmerged state actually
                        // flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1486
    /// Forwards a repository's `JobsUpdated` notification as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1490
1491 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1492 fn update_repositories_from_worktree(
1493 &mut self,
1494 worktree_id: WorktreeId,
1495 project_environment: Entity<ProjectEnvironment>,
1496 next_repository_id: Arc<AtomicU64>,
1497 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1498 updated_git_repositories: UpdatedGitRepositoriesSet,
1499 fs: Arc<dyn Fs>,
1500 cx: &mut Context<Self>,
1501 ) {
1502 let mut removed_ids = Vec::new();
1503 for update in updated_git_repositories.iter() {
1504 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1505 let existing_work_directory_abs_path =
1506 repo.read(cx).work_directory_abs_path.clone();
1507 Some(&existing_work_directory_abs_path)
1508 == update.old_work_directory_abs_path.as_ref()
1509 || Some(&existing_work_directory_abs_path)
1510 == update.new_work_directory_abs_path.as_ref()
1511 }) {
1512 let repo_id = *id;
1513 if let Some(new_work_directory_abs_path) =
1514 update.new_work_directory_abs_path.clone()
1515 {
1516 self.worktree_ids
1517 .entry(repo_id)
1518 .or_insert_with(HashSet::new)
1519 .insert(worktree_id);
1520 existing.update(cx, |existing, cx| {
1521 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1522 existing.schedule_scan(updates_tx.clone(), cx);
1523 });
1524 } else {
1525 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1526 worktree_ids.remove(&worktree_id);
1527 if worktree_ids.is_empty() {
1528 removed_ids.push(repo_id);
1529 }
1530 }
1531 }
1532 } else if let UpdatedGitRepository {
1533 new_work_directory_abs_path: Some(work_directory_abs_path),
1534 dot_git_abs_path: Some(dot_git_abs_path),
1535 repository_dir_abs_path: Some(_repository_dir_abs_path),
1536 common_dir_abs_path: Some(common_dir_abs_path),
1537 ..
1538 } = update
1539 {
1540 let original_repo_abs_path: Arc<Path> =
1541 git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
1542 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1543 let is_trusted = TrustedWorktrees::try_get_global(cx)
1544 .map(|trusted_worktrees| {
1545 trusted_worktrees.update(cx, |trusted_worktrees, cx| {
1546 trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
1547 })
1548 })
1549 .unwrap_or(false);
1550 let git_store = cx.weak_entity();
1551 let repo = cx.new(|cx| {
1552 let mut repo = Repository::local(
1553 id,
1554 work_directory_abs_path.clone(),
1555 original_repo_abs_path.clone(),
1556 dot_git_abs_path.clone(),
1557 project_environment.downgrade(),
1558 fs.clone(),
1559 is_trusted,
1560 git_store,
1561 cx,
1562 );
1563 if let Some(updates_tx) = updates_tx.as_ref() {
1564 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1565 updates_tx
1566 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1567 .ok();
1568 }
1569 repo.schedule_scan(updates_tx.clone(), cx);
1570 repo
1571 });
1572 self._subscriptions
1573 .push(cx.subscribe(&repo, Self::on_repository_event));
1574 self._subscriptions
1575 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1576 self.repositories.insert(id, repo);
1577 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1578 cx.emit(GitStoreEvent::RepositoryAdded);
1579 self.active_repo_id.get_or_insert_with(|| {
1580 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1581 id
1582 });
1583 }
1584 }
1585
1586 for id in removed_ids {
1587 if self.active_repo_id == Some(id) {
1588 self.active_repo_id = None;
1589 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1590 }
1591 self.repositories.remove(&id);
1592 if let Some(updates_tx) = updates_tx.as_ref() {
1593 updates_tx
1594 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1595 .ok();
1596 }
1597 }
1598 }
1599
    /// Propagates worktree trust changes to the git backends of affected
    /// repositories. Only meaningful for local projects.
    fn on_trusted_worktrees_event(
        &mut self,
        _: Entity<TrustedWorktreesStore>,
        event: &TrustedWorktreesEvent,
        cx: &mut Context<Self>,
    ) {
        if !matches!(self.state, GitStoreState::Local { .. }) {
            return;
        }

        let (is_trusted, event_paths) = match event {
            TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
            TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
        };

        // A repository is affected if any of its worktrees appears in the
        // event's path set.
        for (repo_id, worktree_ids) in &self.worktree_ids {
            if worktree_ids
                .iter()
                .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
            {
                if let Some(repo) = self.repositories.get(repo_id) {
                    let repository_state = repo.read(cx).repository_state.clone();
                    // Resolving the repository state is async, so apply the
                    // trust flag on the background executor. Remote
                    // repositories have no local backend and are skipped.
                    cx.background_spawn(async move {
                        if let Ok(RepositoryState::Local(state)) = repository_state.await {
                            state.backend.set_trusted(is_trusted);
                        }
                    })
                    .detach();
                }
            }
        }
    }
1632
    /// Keeps per-buffer git state in sync with buffer-store events: language
    /// changes, shared-buffer closure, buffer drops, and file renames.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch each new buffer so diffs can re-highlight when its
                // language changes.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // A peer closed the buffer: forget the diff we were sharing
                // with that peer.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // The buffer is gone entirely: drop all associated git state.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Reload the committed text for the new path and rebase the
                    // buffer's diffs on it; errors are logged, not surfaced.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1705
    /// Recomputes the diffs and conflict markers for each of the given buffers
    /// that has git state, returning a future that resolves when all the
    /// resulting recalculations have finished.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                // Diff recalculation and conflict reparsing both run against
                // the same text snapshot of the buffer.
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1731
    /// When hunks are staged or unstaged in a buffer diff, writes the new
    /// index text to the repository; on failure, rolls back the diff's pending
    /// hunks and surfaces an `IndexWriteError` event.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the operation counter so stale index writes can be
                // detected and ignored.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // `Ok(Err(_))`: the job completed but the index write
                        // itself failed — undo the optimistic hunk state.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1771
1772 fn local_worktree_git_repos_changed(
1773 &mut self,
1774 worktree: Entity<Worktree>,
1775 changed_repos: &UpdatedGitRepositoriesSet,
1776 cx: &mut Context<Self>,
1777 ) {
1778 log::debug!("local worktree repos changed");
1779 debug_assert!(worktree.read(cx).is_local());
1780
1781 for repository in self.repositories.values() {
1782 repository.update(cx, |repository, cx| {
1783 let repo_abs_path = &repository.work_directory_abs_path;
1784 if changed_repos.iter().any(|update| {
1785 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1786 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1787 }) {
1788 repository.reload_buffer_diff_bases(cx);
1789 }
1790 });
1791 }
1792 }
1793
    /// All repositories known to this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1797
1798 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1799 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1800 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1801 Some(status.status)
1802 }
1803
1804 pub fn repository_and_path_for_buffer_id(
1805 &self,
1806 buffer_id: BufferId,
1807 cx: &App,
1808 ) -> Option<(Entity<Repository>, RepoPath)> {
1809 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1810 let project_path = buffer.read(cx).project_path(cx)?;
1811 self.repository_and_path_for_project_path(&project_path, cx)
1812 }
1813
    /// Resolves the repository containing the given project path, along with
    /// the path made relative to that repository's work directory.
    ///
    /// When repositories are nested, the candidates all contain the path, so
    /// their work directories form a prefix chain; taking the maximum path
    /// selects the innermost (deepest) repository.
    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }
1828
1829 pub fn git_init(
1830 &self,
1831 path: Arc<Path>,
1832 fallback_branch_name: String,
1833 cx: &App,
1834 ) -> Task<Result<()>> {
1835 match &self.state {
1836 GitStoreState::Local { fs, .. } => {
1837 let fs = fs.clone();
1838 cx.background_executor()
1839 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1840 }
1841 GitStoreState::Remote {
1842 upstream_client,
1843 upstream_project_id: project_id,
1844 ..
1845 } => {
1846 let client = upstream_client.clone();
1847 let project_id = *project_id;
1848 cx.background_executor().spawn(async move {
1849 client
1850 .request(proto::GitInit {
1851 project_id: project_id,
1852 abs_path: path.to_string_lossy().into_owned(),
1853 fallback_branch_name,
1854 })
1855 .await?;
1856 Ok(())
1857 })
1858 }
1859 }
1860 }
1861
1862 pub fn git_clone(
1863 &self,
1864 repo: String,
1865 path: impl Into<Arc<std::path::Path>>,
1866 cx: &App,
1867 ) -> Task<Result<()>> {
1868 let path = path.into();
1869 match &self.state {
1870 GitStoreState::Local { fs, .. } => {
1871 let fs = fs.clone();
1872 cx.background_executor()
1873 .spawn(async move { fs.git_clone(&repo, &path).await })
1874 }
1875 GitStoreState::Remote {
1876 upstream_client,
1877 upstream_project_id,
1878 ..
1879 } => {
1880 if upstream_client.is_via_collab() {
1881 return Task::ready(Err(anyhow!(
1882 "Git Clone isn't supported for project guests"
1883 )));
1884 }
1885 let request = upstream_client.request(proto::GitClone {
1886 project_id: *upstream_project_id,
1887 abs_path: path.to_string_lossy().into_owned(),
1888 remote_repo: repo,
1889 });
1890
1891 cx.background_spawn(async move {
1892 let result = request.await?;
1893
1894 match result.success {
1895 true => Ok(()),
1896 false => Err(anyhow!("Git Clone failed")),
1897 }
1898 })
1899 }
1900 }
1901 }
1902
    /// Handles an `UpdateRepository` message from upstream: creates the remote
    /// repository entity if it's new, applies the update, makes it the active
    /// repository if none is set, and re-forwards the update downstream when
    /// this project is itself shared.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // The subscription has to be created inside the `or_insert_with`
            // closure, but pushed onto `this._subscriptions` afterwards to
            // avoid borrowing `this` twice.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                // Clone so the original update can still be forwarded below.
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward to downstream peers, rewriting the project id to ours.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1958
1959 async fn handle_remove_repository(
1960 this: Entity<Self>,
1961 envelope: TypedEnvelope<proto::RemoveRepository>,
1962 mut cx: AsyncApp,
1963 ) -> Result<()> {
1964 this.update(&mut cx, |this, cx| {
1965 let mut update = envelope.payload;
1966 let id = RepositoryId::from_proto(update.id);
1967 this.repositories.remove(&id);
1968 if let Some((client, project_id)) = this.downstream_client() {
1969 update.project_id = project_id.to_proto();
1970 client.send(update).log_err();
1971 }
1972 if this.active_repo_id == Some(id) {
1973 this.active_repo_id = None;
1974 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1975 }
1976 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1977 });
1978 Ok(())
1979 }
1980
1981 async fn handle_git_init(
1982 this: Entity<Self>,
1983 envelope: TypedEnvelope<proto::GitInit>,
1984 cx: AsyncApp,
1985 ) -> Result<proto::Ack> {
1986 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1987 let name = envelope.payload.fallback_branch_name;
1988 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1989 .await?;
1990
1991 Ok(proto::Ack {})
1992 }
1993
1994 async fn handle_git_clone(
1995 this: Entity<Self>,
1996 envelope: TypedEnvelope<proto::GitClone>,
1997 cx: AsyncApp,
1998 ) -> Result<proto::GitCloneResponse> {
1999 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2000 let repo_name = envelope.payload.remote_repo;
2001 let result = cx
2002 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2003 .await;
2004
2005 Ok(proto::GitCloneResponse {
2006 success: result.is_ok(),
2007 })
2008 }
2009
    /// Handles a `Fetch` RPC request: runs a fetch on the requested repository,
    /// proxying credential (askpass) prompts back to the requesting peer via
    /// the given askpass id, and returns git's stdout/stderr.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // Double `?`: the outer for the job channel, the inner for the fetch
        // itself.
        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2039
    /// Handles a `Push` RPC request: pushes the given branch to the given
    /// remote, proxying credential (askpass) prompts back to the requesting
    /// peer, and returns git's stdout/stderr.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // Translate the optional proto push option into the internal enum;
        // `options()` decodes the raw field, the `map` preserves absence.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        // Double `?`: the outer for the job channel, the inner for the push.
        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2087
2088 async fn handle_pull(
2089 this: Entity<Self>,
2090 envelope: TypedEnvelope<proto::Pull>,
2091 mut cx: AsyncApp,
2092 ) -> Result<proto::RemoteMessageResponse> {
2093 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2094 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2095 let askpass_id = envelope.payload.askpass_id;
2096 let askpass = make_remote_delegate(
2097 this,
2098 envelope.payload.project_id,
2099 repository_id,
2100 askpass_id,
2101 &mut cx,
2102 );
2103
2104 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2105 let remote_name = envelope.payload.remote_name.into();
2106 let rebase = envelope.payload.rebase;
2107
2108 let remote_message = repository_handle
2109 .update(&mut cx, |repository_handle, cx| {
2110 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2111 })
2112 .await??;
2113
2114 Ok(proto::RemoteMessageResponse {
2115 stdout: remote_message.stdout,
2116 stderr: remote_message.stderr,
2117 })
2118 }
2119
2120 async fn handle_stage(
2121 this: Entity<Self>,
2122 envelope: TypedEnvelope<proto::Stage>,
2123 mut cx: AsyncApp,
2124 ) -> Result<proto::Ack> {
2125 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2126 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2127
2128 let entries = envelope
2129 .payload
2130 .paths
2131 .into_iter()
2132 .map(|path| RepoPath::new(&path))
2133 .collect::<Result<Vec<_>>>()?;
2134
2135 repository_handle
2136 .update(&mut cx, |repository_handle, cx| {
2137 repository_handle.stage_entries(entries, cx)
2138 })
2139 .await?;
2140 Ok(proto::Ack {})
2141 }
2142
2143 async fn handle_unstage(
2144 this: Entity<Self>,
2145 envelope: TypedEnvelope<proto::Unstage>,
2146 mut cx: AsyncApp,
2147 ) -> Result<proto::Ack> {
2148 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2149 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2150
2151 let entries = envelope
2152 .payload
2153 .paths
2154 .into_iter()
2155 .map(|path| RepoPath::new(&path))
2156 .collect::<Result<Vec<_>>>()?;
2157
2158 repository_handle
2159 .update(&mut cx, |repository_handle, cx| {
2160 repository_handle.unstage_entries(entries, cx)
2161 })
2162 .await?;
2163
2164 Ok(proto::Ack {})
2165 }
2166
2167 async fn handle_stash(
2168 this: Entity<Self>,
2169 envelope: TypedEnvelope<proto::Stash>,
2170 mut cx: AsyncApp,
2171 ) -> Result<proto::Ack> {
2172 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2173 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2174
2175 let entries = envelope
2176 .payload
2177 .paths
2178 .into_iter()
2179 .map(|path| RepoPath::new(&path))
2180 .collect::<Result<Vec<_>>>()?;
2181
2182 repository_handle
2183 .update(&mut cx, |repository_handle, cx| {
2184 repository_handle.stash_entries(entries, cx)
2185 })
2186 .await?;
2187
2188 Ok(proto::Ack {})
2189 }
2190
2191 async fn handle_stash_pop(
2192 this: Entity<Self>,
2193 envelope: TypedEnvelope<proto::StashPop>,
2194 mut cx: AsyncApp,
2195 ) -> Result<proto::Ack> {
2196 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2197 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2198 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2199
2200 repository_handle
2201 .update(&mut cx, |repository_handle, cx| {
2202 repository_handle.stash_pop(stash_index, cx)
2203 })
2204 .await?;
2205
2206 Ok(proto::Ack {})
2207 }
2208
2209 async fn handle_stash_apply(
2210 this: Entity<Self>,
2211 envelope: TypedEnvelope<proto::StashApply>,
2212 mut cx: AsyncApp,
2213 ) -> Result<proto::Ack> {
2214 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2215 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2216 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2217
2218 repository_handle
2219 .update(&mut cx, |repository_handle, cx| {
2220 repository_handle.stash_apply(stash_index, cx)
2221 })
2222 .await?;
2223
2224 Ok(proto::Ack {})
2225 }
2226
2227 async fn handle_stash_drop(
2228 this: Entity<Self>,
2229 envelope: TypedEnvelope<proto::StashDrop>,
2230 mut cx: AsyncApp,
2231 ) -> Result<proto::Ack> {
2232 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2233 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2234 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2235
2236 repository_handle
2237 .update(&mut cx, |repository_handle, cx| {
2238 repository_handle.stash_drop(stash_index, cx)
2239 })
2240 .await??;
2241
2242 Ok(proto::Ack {})
2243 }
2244
2245 async fn handle_set_index_text(
2246 this: Entity<Self>,
2247 envelope: TypedEnvelope<proto::SetIndexText>,
2248 mut cx: AsyncApp,
2249 ) -> Result<proto::Ack> {
2250 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2251 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2252 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2253
2254 repository_handle
2255 .update(&mut cx, |repository_handle, cx| {
2256 repository_handle.spawn_set_index_text_job(
2257 repo_path,
2258 envelope.payload.text,
2259 None,
2260 cx,
2261 )
2262 })
2263 .await??;
2264 Ok(proto::Ack {})
2265 }
2266
2267 async fn handle_run_hook(
2268 this: Entity<Self>,
2269 envelope: TypedEnvelope<proto::RunGitHook>,
2270 mut cx: AsyncApp,
2271 ) -> Result<proto::Ack> {
2272 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2273 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2274 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2275 repository_handle
2276 .update(&mut cx, |repository_handle, cx| {
2277 repository_handle.run_hook(hook, cx)
2278 })
2279 .await??;
2280 Ok(proto::Ack {})
2281 }
2282
2283 async fn handle_commit(
2284 this: Entity<Self>,
2285 envelope: TypedEnvelope<proto::Commit>,
2286 mut cx: AsyncApp,
2287 ) -> Result<proto::Ack> {
2288 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2289 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2290 let askpass_id = envelope.payload.askpass_id;
2291
2292 let askpass = make_remote_delegate(
2293 this,
2294 envelope.payload.project_id,
2295 repository_id,
2296 askpass_id,
2297 &mut cx,
2298 );
2299
2300 let message = SharedString::from(envelope.payload.message);
2301 let name = envelope.payload.name.map(SharedString::from);
2302 let email = envelope.payload.email.map(SharedString::from);
2303 let options = envelope.payload.options.unwrap_or_default();
2304
2305 repository_handle
2306 .update(&mut cx, |repository_handle, cx| {
2307 repository_handle.commit(
2308 message,
2309 name.zip(email),
2310 CommitOptions {
2311 amend: options.amend,
2312 signoff: options.signoff,
2313 },
2314 askpass,
2315 cx,
2316 )
2317 })
2318 .await??;
2319 Ok(proto::Ack {})
2320 }
2321
2322 async fn handle_get_remotes(
2323 this: Entity<Self>,
2324 envelope: TypedEnvelope<proto::GetRemotes>,
2325 mut cx: AsyncApp,
2326 ) -> Result<proto::GetRemotesResponse> {
2327 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2328 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2329
2330 let branch_name = envelope.payload.branch_name;
2331 let is_push = envelope.payload.is_push;
2332
2333 let remotes = repository_handle
2334 .update(&mut cx, |repository_handle, _| {
2335 repository_handle.get_remotes(branch_name, is_push)
2336 })
2337 .await??;
2338
2339 Ok(proto::GetRemotesResponse {
2340 remotes: remotes
2341 .into_iter()
2342 .map(|remotes| proto::get_remotes_response::Remote {
2343 name: remotes.name.to_string(),
2344 })
2345 .collect::<Vec<_>>(),
2346 })
2347 }
2348
2349 async fn handle_get_worktrees(
2350 this: Entity<Self>,
2351 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2352 mut cx: AsyncApp,
2353 ) -> Result<proto::GitWorktreesResponse> {
2354 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2355 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2356
2357 let worktrees = repository_handle
2358 .update(&mut cx, |repository_handle, _| {
2359 repository_handle.worktrees()
2360 })
2361 .await??;
2362
2363 Ok(proto::GitWorktreesResponse {
2364 worktrees: worktrees
2365 .into_iter()
2366 .map(|worktree| worktree_to_proto(&worktree))
2367 .collect::<Vec<_>>(),
2368 })
2369 }
2370
2371 async fn handle_create_worktree(
2372 this: Entity<Self>,
2373 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2374 mut cx: AsyncApp,
2375 ) -> Result<proto::Ack> {
2376 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2377 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2378 let directory = PathBuf::from(envelope.payload.directory);
2379 let name = envelope.payload.name;
2380 let commit = envelope.payload.commit;
2381
2382 repository_handle
2383 .update(&mut cx, |repository_handle, _| {
2384 repository_handle.create_worktree(name, directory, commit)
2385 })
2386 .await??;
2387
2388 Ok(proto::Ack {})
2389 }
2390
2391 async fn handle_remove_worktree(
2392 this: Entity<Self>,
2393 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2394 mut cx: AsyncApp,
2395 ) -> Result<proto::Ack> {
2396 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2397 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2398 let path = PathBuf::from(envelope.payload.path);
2399 let force = envelope.payload.force;
2400
2401 repository_handle
2402 .update(&mut cx, |repository_handle, _| {
2403 repository_handle.remove_worktree(path, force)
2404 })
2405 .await??;
2406
2407 Ok(proto::Ack {})
2408 }
2409
2410 async fn handle_rename_worktree(
2411 this: Entity<Self>,
2412 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2413 mut cx: AsyncApp,
2414 ) -> Result<proto::Ack> {
2415 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2416 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2417 let old_path = PathBuf::from(envelope.payload.old_path);
2418 let new_path = PathBuf::from(envelope.payload.new_path);
2419
2420 repository_handle
2421 .update(&mut cx, |repository_handle, _| {
2422 repository_handle.rename_worktree(old_path, new_path)
2423 })
2424 .await??;
2425
2426 Ok(proto::Ack {})
2427 }
2428
2429 async fn handle_get_branches(
2430 this: Entity<Self>,
2431 envelope: TypedEnvelope<proto::GitGetBranches>,
2432 mut cx: AsyncApp,
2433 ) -> Result<proto::GitBranchesResponse> {
2434 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2435 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2436
2437 let branches = repository_handle
2438 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2439 .await??;
2440
2441 Ok(proto::GitBranchesResponse {
2442 branches: branches
2443 .into_iter()
2444 .map(|branch| branch_to_proto(&branch))
2445 .collect::<Vec<_>>(),
2446 })
2447 }
2448 async fn handle_get_default_branch(
2449 this: Entity<Self>,
2450 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2451 mut cx: AsyncApp,
2452 ) -> Result<proto::GetDefaultBranchResponse> {
2453 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2454 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2455
2456 let branch = repository_handle
2457 .update(&mut cx, |repository_handle, _| {
2458 repository_handle.default_branch(false)
2459 })
2460 .await??
2461 .map(Into::into);
2462
2463 Ok(proto::GetDefaultBranchResponse { branch })
2464 }
2465 async fn handle_create_branch(
2466 this: Entity<Self>,
2467 envelope: TypedEnvelope<proto::GitCreateBranch>,
2468 mut cx: AsyncApp,
2469 ) -> Result<proto::Ack> {
2470 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2471 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2472 let branch_name = envelope.payload.branch_name;
2473
2474 repository_handle
2475 .update(&mut cx, |repository_handle, _| {
2476 repository_handle.create_branch(branch_name, None)
2477 })
2478 .await??;
2479
2480 Ok(proto::Ack {})
2481 }
2482
2483 async fn handle_change_branch(
2484 this: Entity<Self>,
2485 envelope: TypedEnvelope<proto::GitChangeBranch>,
2486 mut cx: AsyncApp,
2487 ) -> Result<proto::Ack> {
2488 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2489 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2490 let branch_name = envelope.payload.branch_name;
2491
2492 repository_handle
2493 .update(&mut cx, |repository_handle, _| {
2494 repository_handle.change_branch(branch_name)
2495 })
2496 .await??;
2497
2498 Ok(proto::Ack {})
2499 }
2500
2501 async fn handle_rename_branch(
2502 this: Entity<Self>,
2503 envelope: TypedEnvelope<proto::GitRenameBranch>,
2504 mut cx: AsyncApp,
2505 ) -> Result<proto::Ack> {
2506 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2507 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2508 let branch = envelope.payload.branch;
2509 let new_name = envelope.payload.new_name;
2510
2511 repository_handle
2512 .update(&mut cx, |repository_handle, _| {
2513 repository_handle.rename_branch(branch, new_name)
2514 })
2515 .await??;
2516
2517 Ok(proto::Ack {})
2518 }
2519
2520 async fn handle_create_remote(
2521 this: Entity<Self>,
2522 envelope: TypedEnvelope<proto::GitCreateRemote>,
2523 mut cx: AsyncApp,
2524 ) -> Result<proto::Ack> {
2525 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2526 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2527 let remote_name = envelope.payload.remote_name;
2528 let remote_url = envelope.payload.remote_url;
2529
2530 repository_handle
2531 .update(&mut cx, |repository_handle, _| {
2532 repository_handle.create_remote(remote_name, remote_url)
2533 })
2534 .await??;
2535
2536 Ok(proto::Ack {})
2537 }
2538
2539 async fn handle_delete_branch(
2540 this: Entity<Self>,
2541 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2542 mut cx: AsyncApp,
2543 ) -> Result<proto::Ack> {
2544 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2545 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2546 let is_remote = envelope.payload.is_remote;
2547 let branch_name = envelope.payload.branch_name;
2548
2549 repository_handle
2550 .update(&mut cx, |repository_handle, _| {
2551 repository_handle.delete_branch(is_remote, branch_name)
2552 })
2553 .await??;
2554
2555 Ok(proto::Ack {})
2556 }
2557
2558 async fn handle_remove_remote(
2559 this: Entity<Self>,
2560 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2561 mut cx: AsyncApp,
2562 ) -> Result<proto::Ack> {
2563 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2564 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2565 let remote_name = envelope.payload.remote_name;
2566
2567 repository_handle
2568 .update(&mut cx, |repository_handle, _| {
2569 repository_handle.remove_remote(remote_name)
2570 })
2571 .await??;
2572
2573 Ok(proto::Ack {})
2574 }
2575
2576 async fn handle_show(
2577 this: Entity<Self>,
2578 envelope: TypedEnvelope<proto::GitShow>,
2579 mut cx: AsyncApp,
2580 ) -> Result<proto::GitCommitDetails> {
2581 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2582 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2583
2584 let commit = repository_handle
2585 .update(&mut cx, |repository_handle, _| {
2586 repository_handle.show(envelope.payload.commit)
2587 })
2588 .await??;
2589 Ok(proto::GitCommitDetails {
2590 sha: commit.sha.into(),
2591 message: commit.message.into(),
2592 commit_timestamp: commit.commit_timestamp,
2593 author_email: commit.author_email.into(),
2594 author_name: commit.author_name.into(),
2595 })
2596 }
2597
2598 async fn handle_load_commit_diff(
2599 this: Entity<Self>,
2600 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2601 mut cx: AsyncApp,
2602 ) -> Result<proto::LoadCommitDiffResponse> {
2603 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2604 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2605
2606 let commit_diff = repository_handle
2607 .update(&mut cx, |repository_handle, _| {
2608 repository_handle.load_commit_diff(envelope.payload.commit)
2609 })
2610 .await??;
2611 Ok(proto::LoadCommitDiffResponse {
2612 files: commit_diff
2613 .files
2614 .into_iter()
2615 .map(|file| proto::CommitFile {
2616 path: file.path.to_proto(),
2617 old_text: file.old_text,
2618 new_text: file.new_text,
2619 is_binary: file.is_binary,
2620 })
2621 .collect(),
2622 })
2623 }
2624
2625 async fn handle_file_history(
2626 this: Entity<Self>,
2627 envelope: TypedEnvelope<proto::GitFileHistory>,
2628 mut cx: AsyncApp,
2629 ) -> Result<proto::GitFileHistoryResponse> {
2630 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2631 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2632 let path = RepoPath::from_proto(&envelope.payload.path)?;
2633 let skip = envelope.payload.skip as usize;
2634 let limit = envelope.payload.limit.map(|l| l as usize);
2635
2636 let file_history = repository_handle
2637 .update(&mut cx, |repository_handle, _| {
2638 repository_handle.file_history_paginated(path, skip, limit)
2639 })
2640 .await??;
2641
2642 Ok(proto::GitFileHistoryResponse {
2643 entries: file_history
2644 .entries
2645 .into_iter()
2646 .map(|entry| proto::FileHistoryEntry {
2647 sha: entry.sha.to_string(),
2648 subject: entry.subject.to_string(),
2649 message: entry.message.to_string(),
2650 commit_timestamp: entry.commit_timestamp,
2651 author_name: entry.author_name.to_string(),
2652 author_email: entry.author_email.to_string(),
2653 })
2654 .collect(),
2655 path: file_history.path.to_proto(),
2656 })
2657 }
2658
2659 async fn handle_reset(
2660 this: Entity<Self>,
2661 envelope: TypedEnvelope<proto::GitReset>,
2662 mut cx: AsyncApp,
2663 ) -> Result<proto::Ack> {
2664 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2665 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2666
2667 let mode = match envelope.payload.mode() {
2668 git_reset::ResetMode::Soft => ResetMode::Soft,
2669 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2670 };
2671
2672 repository_handle
2673 .update(&mut cx, |repository_handle, cx| {
2674 repository_handle.reset(envelope.payload.commit, mode, cx)
2675 })
2676 .await??;
2677 Ok(proto::Ack {})
2678 }
2679
2680 async fn handle_checkout_files(
2681 this: Entity<Self>,
2682 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2683 mut cx: AsyncApp,
2684 ) -> Result<proto::Ack> {
2685 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2686 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2687 let paths = envelope
2688 .payload
2689 .paths
2690 .iter()
2691 .map(|s| RepoPath::from_proto(s))
2692 .collect::<Result<Vec<_>>>()?;
2693
2694 repository_handle
2695 .update(&mut cx, |repository_handle, cx| {
2696 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2697 })
2698 .await?;
2699 Ok(proto::Ack {})
2700 }
2701
    /// Handles an `OpenCommitMessageBuffer` RPC: opens the repository's
    /// commit-message buffer, replicates it to the requesting peer, and
    /// returns the buffer's remote id so the peer can bind to it.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Share the buffer with the peer that asked for it — the original
        // sender when the request was forwarded through an intermediary.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    // Replication happens in the background; failures are
                    // logged rather than failing this request.
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2732
    /// Handles an `AskPassRequest`: routes a credential prompt from a remote
    /// git operation to the locally registered askpass delegate and returns
    /// the user's answer.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map while awaiting the user, so the
        // mutex guard isn't held across the await.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so follow-up prompts for the same id work.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2761
2762 async fn handle_check_for_pushed_commits(
2763 this: Entity<Self>,
2764 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2765 mut cx: AsyncApp,
2766 ) -> Result<proto::CheckForPushedCommitsResponse> {
2767 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2768 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2769
2770 let branches = repository_handle
2771 .update(&mut cx, |repository_handle, _| {
2772 repository_handle.check_for_pushed_commits()
2773 })
2774 .await??;
2775 Ok(proto::CheckForPushedCommitsResponse {
2776 pushed_to: branches
2777 .into_iter()
2778 .map(|commit| commit.to_string())
2779 .collect(),
2780 })
2781 }
2782
2783 async fn handle_git_diff(
2784 this: Entity<Self>,
2785 envelope: TypedEnvelope<proto::GitDiff>,
2786 mut cx: AsyncApp,
2787 ) -> Result<proto::GitDiffResponse> {
2788 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2789 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2790 let diff_type = match envelope.payload.diff_type() {
2791 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2792 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2793 proto::git_diff::DiffType::MergeBase => {
2794 let base_ref = envelope
2795 .payload
2796 .merge_base_ref
2797 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2798 DiffType::MergeBase {
2799 base_ref: base_ref.into(),
2800 }
2801 }
2802 };
2803
2804 let mut diff = repository_handle
2805 .update(&mut cx, |repository_handle, cx| {
2806 repository_handle.diff(diff_type, cx)
2807 })
2808 .await??;
2809 const ONE_MB: usize = 1_000_000;
2810 if diff.len() > ONE_MB {
2811 diff = diff.chars().take(ONE_MB).collect()
2812 }
2813
2814 Ok(proto::GitDiffResponse { diff })
2815 }
2816
2817 async fn handle_tree_diff(
2818 this: Entity<Self>,
2819 request: TypedEnvelope<proto::GetTreeDiff>,
2820 mut cx: AsyncApp,
2821 ) -> Result<proto::GetTreeDiffResponse> {
2822 let repository_id = RepositoryId(request.payload.repository_id);
2823 let diff_type = if request.payload.is_merge {
2824 DiffTreeType::MergeBase {
2825 base: request.payload.base.into(),
2826 head: request.payload.head.into(),
2827 }
2828 } else {
2829 DiffTreeType::Since {
2830 base: request.payload.base.into(),
2831 head: request.payload.head.into(),
2832 }
2833 };
2834
2835 let diff = this
2836 .update(&mut cx, |this, cx| {
2837 let repository = this.repositories().get(&repository_id)?;
2838 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2839 })
2840 .context("missing repository")?
2841 .await??;
2842
2843 Ok(proto::GetTreeDiffResponse {
2844 entries: diff
2845 .entries
2846 .into_iter()
2847 .map(|(path, status)| proto::TreeDiffStatus {
2848 path: path.as_ref().to_proto(),
2849 status: match status {
2850 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2851 TreeDiffStatus::Modified { .. } => {
2852 proto::tree_diff_status::Status::Modified.into()
2853 }
2854 TreeDiffStatus::Deleted { .. } => {
2855 proto::tree_diff_status::Status::Deleted.into()
2856 }
2857 },
2858 oid: match status {
2859 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2860 Some(old.to_string())
2861 }
2862 TreeDiffStatus::Added => None,
2863 },
2864 })
2865 .collect(),
2866 })
2867 }
2868
2869 async fn handle_get_blob_content(
2870 this: Entity<Self>,
2871 request: TypedEnvelope<proto::GetBlobContent>,
2872 mut cx: AsyncApp,
2873 ) -> Result<proto::GetBlobContentResponse> {
2874 let oid = git::Oid::from_str(&request.payload.oid)?;
2875 let repository_id = RepositoryId(request.payload.repository_id);
2876 let content = this
2877 .update(&mut cx, |this, cx| {
2878 let repository = this.repositories().get(&repository_id)?;
2879 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2880 })
2881 .context("missing repository")?
2882 .await?;
2883 Ok(proto::GetBlobContentResponse { content })
2884 }
2885
    /// Handles an `OpenUnstagedDiff` RPC: opens the index-vs-buffer diff for a
    /// buffer, records it as shared with the requesting peer, and returns the
    /// staged (index) base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff per peer so it can be kept up to date / released
        // when that peer disconnects.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2909
    /// Handles an `OpenUncommittedDiff` RPC: opens the HEAD-vs-buffer diff
    /// (with the index diff as its secondary), records it as shared with the
    /// requesting peer, and returns the committed/staged base texts.
    ///
    /// The response avoids sending the index text redundantly: when the index
    /// base equals the HEAD base, the mode is `IndexMatchesHead` and
    /// `staged_text` is omitted.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff per peer so it can be kept up to date / released
        // when that peer disconnects.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff (if any) is the unstaged diff; its base text
            // is the index content for this path, when one exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                // Path exists in HEAD.
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Same snapshot for index and HEAD: let the client
                        // reuse the committed text instead of resending it.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // In HEAD but not in the index.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // Not in HEAD; the staged text (if any) comes from the index.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2970
    /// Handles an `UpdateDiffBases` RPC: applies new staged/committed base
    /// texts to the buffer's diff state. Silently ignores buffers that have
    /// no diff state or are no longer open.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
2989
    /// Handles a `BlameBuffer` RPC: waits for the buffer to reach the
    /// requested version, then computes git blame for it at that version.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Make sure all edits up to the requested version have been applied
        // locally before blaming, so line numbers match the requester's view.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3012
3013 async fn handle_get_permalink_to_line(
3014 this: Entity<Self>,
3015 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3016 mut cx: AsyncApp,
3017 ) -> Result<proto::GetPermalinkToLineResponse> {
3018 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3019 // let version = deserialize_version(&envelope.payload.version);
3020 let selection = {
3021 let proto_selection = envelope
3022 .payload
3023 .selection
3024 .context("no selection to get permalink for defined")?;
3025 proto_selection.start as u32..proto_selection.end as u32
3026 };
3027 let buffer = this.read_with(&cx, |this, cx| {
3028 this.buffer_store.read(cx).get_existing(buffer_id)
3029 })?;
3030 let permalink = this
3031 .update(&mut cx, |this, cx| {
3032 this.get_permalink_to_line(&buffer, selection, cx)
3033 })
3034 .await?;
3035 Ok(proto::GetPermalinkToLineResponse {
3036 permalink: permalink.to_string(),
3037 })
3038 }
3039
3040 fn repository_for_request(
3041 this: &Entity<Self>,
3042 id: RepositoryId,
3043 cx: &mut AsyncApp,
3044 ) -> Result<Entity<Repository>> {
3045 this.read_with(cx, |this, _| {
3046 this.repositories
3047 .get(&id)
3048 .context("missing repository handle")
3049 .cloned()
3050 })
3051 }
3052
3053 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3054 self.repositories
3055 .iter()
3056 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3057 .collect()
3058 }
3059
    /// Groups a batch of updated worktree entries by the repository each path
    /// belongs to, returning repo-relative paths.
    ///
    /// The grouping happens on the background executor: paths are sorted, and
    /// for each repository (processed innermost-first) the contiguous run of
    /// paths under its working directory is claimed. A path claimed by an
    /// inner repository is never also attributed to an enclosing one.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Snapshot (workdir, repo) pairs up front; the rest runs off the main thread.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize so paths can be compared against repository workdirs.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // Iterate repos in reverse sorted order so that nested (more
            // specific) workdirs are examined before their ancestors.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the index so duplicates can be filtered below.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3139}
3140
3141impl BufferGitState {
3142 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3143 Self {
3144 unstaged_diff: Default::default(),
3145 uncommitted_diff: Default::default(),
3146 oid_diffs: Default::default(),
3147 recalculate_diff_task: Default::default(),
3148 language: Default::default(),
3149 language_registry: Default::default(),
3150 recalculating_tx: postage::watch::channel_with(false).0,
3151 hunk_staging_operation_count: 0,
3152 hunk_staging_operation_count_as_of_write: 0,
3153 head_text: Default::default(),
3154 index_text: Default::default(),
3155 oid_texts: Default::default(),
3156 head_changed: Default::default(),
3157 index_changed: Default::default(),
3158 language_changed: Default::default(),
3159 conflict_updated_futures: Default::default(),
3160 conflict_set: Default::default(),
3161 reparse_conflict_markers_task: Default::default(),
3162 }
3163 }
3164
3165 #[ztracing::instrument(skip_all)]
3166 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3167 self.language = buffer.read(cx).language().cloned();
3168 self.language_changed = true;
3169 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3170 }
3171
    /// Re-scans the buffer for git conflict markers and updates the attached
    /// `ConflictSet`, if one is still alive and previously held conflicts.
    ///
    /// Returns a receiver that resolves once the conflict set has been
    /// updated; if no reparse is needed, the receiver is returned with its
    /// sender already dropped.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when the set previously contained conflicts; otherwise
        // there is nothing to compare the new parse against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff on the background executor; only the final
                // snapshot swap runs back on the entity.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake every caller waiting on a conflict-set update.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3223
3224 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3225 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3226 }
3227
3228 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3229 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3230 }
3231
3232 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3233 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3234 }
3235
3236 fn handle_base_texts_updated(
3237 &mut self,
3238 buffer: text::BufferSnapshot,
3239 message: proto::UpdateDiffBases,
3240 cx: &mut Context<Self>,
3241 ) {
3242 use proto::update_diff_bases::Mode;
3243
3244 let Some(mode) = Mode::from_i32(message.mode) else {
3245 return;
3246 };
3247
3248 let diff_bases_change = match mode {
3249 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3250 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3251 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3252 Mode::IndexAndHead => DiffBasesChange::SetEach {
3253 index: message.staged_text,
3254 head: message.committed_text,
3255 },
3256 };
3257
3258 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3259 }
3260
3261 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3262 if *self.recalculating_tx.borrow() {
3263 let mut rx = self.recalculating_tx.subscribe();
3264 Some(async move {
3265 loop {
3266 let is_recalculating = rx.recv().await;
3267 if is_recalculating != Some(true) {
3268 break;
3269 }
3270 }
3271 })
3272 } else {
3273 None
3274 }
3275 }
3276
3277 fn diff_bases_changed(
3278 &mut self,
3279 buffer: text::BufferSnapshot,
3280 diff_bases_change: Option<DiffBasesChange>,
3281 cx: &mut Context<Self>,
3282 ) {
3283 match diff_bases_change {
3284 Some(DiffBasesChange::SetIndex(index)) => {
3285 self.index_text = index.map(|mut index| {
3286 text::LineEnding::normalize(&mut index);
3287 Arc::from(index.as_str())
3288 });
3289 self.index_changed = true;
3290 }
3291 Some(DiffBasesChange::SetHead(head)) => {
3292 self.head_text = head.map(|mut head| {
3293 text::LineEnding::normalize(&mut head);
3294 Arc::from(head.as_str())
3295 });
3296 self.head_changed = true;
3297 }
3298 Some(DiffBasesChange::SetBoth(text)) => {
3299 let text = text.map(|mut text| {
3300 text::LineEnding::normalize(&mut text);
3301 Arc::from(text.as_str())
3302 });
3303 self.head_text = text.clone();
3304 self.index_text = text;
3305 self.head_changed = true;
3306 self.index_changed = true;
3307 }
3308 Some(DiffBasesChange::SetEach { index, head }) => {
3309 self.index_text = index.map(|mut index| {
3310 text::LineEnding::normalize(&mut index);
3311 Arc::from(index.as_str())
3312 });
3313 self.index_changed = true;
3314 self.head_text = head.map(|mut head| {
3315 text::LineEnding::normalize(&mut head);
3316 Arc::from(head.as_str())
3317 });
3318 self.head_changed = true;
3319 }
3320 None => {}
3321 }
3322
3323 self.recalculate_diffs(buffer, cx)
3324 }
3325
3326 #[ztracing::instrument(skip_all)]
3327 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3328 *self.recalculating_tx.borrow_mut() = true;
3329
3330 let language = self.language.clone();
3331 let language_registry = self.language_registry.clone();
3332 let unstaged_diff = self.unstaged_diff();
3333 let uncommitted_diff = self.uncommitted_diff();
3334 let head = self.head_text.clone();
3335 let index = self.index_text.clone();
3336 let index_changed = self.index_changed;
3337 let head_changed = self.head_changed;
3338 let language_changed = self.language_changed;
3339 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3340 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3341 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3342 (None, None) => true,
3343 _ => false,
3344 };
3345
3346 let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
3347 .oid_diffs
3348 .iter()
3349 .filter_map(|(oid, weak)| {
3350 let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
3351 weak.upgrade().map(|diff| (*oid, diff, base_text))
3352 })
3353 .collect();
3354
3355 self.oid_diffs.retain(|oid, weak| {
3356 let alive = weak.upgrade().is_some();
3357 if !alive {
3358 if let Some(oid) = oid {
3359 self.oid_texts.remove(oid);
3360 }
3361 }
3362 alive
3363 });
3364 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3365 log::debug!(
3366 "start recalculating diffs for buffer {}",
3367 buffer.remote_id()
3368 );
3369
3370 let mut new_unstaged_diff = None;
3371 if let Some(unstaged_diff) = &unstaged_diff {
3372 new_unstaged_diff = Some(
3373 cx.update(|cx| {
3374 unstaged_diff.read(cx).update_diff(
3375 buffer.clone(),
3376 index,
3377 index_changed.then_some(false),
3378 language.clone(),
3379 cx,
3380 )
3381 })
3382 .await,
3383 );
3384 }
3385
3386 // Dropping BufferDiff can be expensive, so yield back to the event loop
3387 // for a bit
3388 yield_now().await;
3389
3390 let mut new_uncommitted_diff = None;
3391 if let Some(uncommitted_diff) = &uncommitted_diff {
3392 new_uncommitted_diff = if index_matches_head {
3393 new_unstaged_diff.clone()
3394 } else {
3395 Some(
3396 cx.update(|cx| {
3397 uncommitted_diff.read(cx).update_diff(
3398 buffer.clone(),
3399 head,
3400 head_changed.then_some(true),
3401 language.clone(),
3402 cx,
3403 )
3404 })
3405 .await,
3406 )
3407 }
3408 }
3409
3410 // Dropping BufferDiff can be expensive, so yield back to the event loop
3411 // for a bit
3412 yield_now().await;
3413
3414 let cancel = this.update(cx, |this, _| {
3415 // This checks whether all pending stage/unstage operations
3416 // have quiesced (i.e. both the corresponding write and the
3417 // read of that write have completed). If not, then we cancel
3418 // this recalculation attempt to avoid invalidating pending
3419 // state too quickly; another recalculation will come along
3420 // later and clear the pending state once the state of the index has settled.
3421 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3422 *this.recalculating_tx.borrow_mut() = false;
3423 true
3424 } else {
3425 false
3426 }
3427 })?;
3428 if cancel {
3429 log::debug!(
3430 concat!(
3431 "aborting recalculating diffs for buffer {}",
3432 "due to subsequent hunk operations",
3433 ),
3434 buffer.remote_id()
3435 );
3436 return Ok(());
3437 }
3438
3439 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3440 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3441 {
3442 let task = unstaged_diff.update(cx, |diff, cx| {
3443 // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
3444 // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
3445 // view multibuffers.
3446 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3447 });
3448 Some(task.await)
3449 } else {
3450 None
3451 };
3452
3453 yield_now().await;
3454
3455 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3456 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3457 {
3458 uncommitted_diff
3459 .update(cx, |diff, cx| {
3460 if language_changed {
3461 diff.language_changed(language.clone(), language_registry, cx);
3462 }
3463 diff.set_snapshot_with_secondary(
3464 new_uncommitted_diff,
3465 &buffer,
3466 unstaged_changed_range.flatten(),
3467 true,
3468 cx,
3469 )
3470 })
3471 .await;
3472 }
3473
3474 yield_now().await;
3475
3476 for (oid, oid_diff, base_text) in oid_diffs {
3477 let new_oid_diff = cx
3478 .update(|cx| {
3479 oid_diff.read(cx).update_diff(
3480 buffer.clone(),
3481 base_text,
3482 None,
3483 language.clone(),
3484 cx,
3485 )
3486 })
3487 .await;
3488
3489 oid_diff
3490 .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
3491 .await;
3492
3493 log::debug!(
3494 "finished recalculating oid diff for buffer {} oid {:?}",
3495 buffer.remote_id(),
3496 oid
3497 );
3498
3499 yield_now().await;
3500 }
3501
3502 log::debug!(
3503 "finished recalculating diffs for buffer {}",
3504 buffer.remote_id()
3505 );
3506
3507 if let Some(this) = this.upgrade() {
3508 this.update(cx, |this, _| {
3509 this.index_changed = false;
3510 this.head_changed = false;
3511 this.language_changed = false;
3512 *this.recalculating_tx.borrow_mut() = false;
3513 });
3514 }
3515
3516 Ok(())
3517 }));
3518 }
3519}
3520
/// Builds an `AskPassDelegate` that forwards git credential prompts to the
/// downstream client over RPC, returning the client's encrypted response.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // No downstream client: nothing to forward the prompt to.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Clear the plaintext response from memory once encrypted.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3550
3551impl RepositoryId {
3552 pub fn to_proto(self) -> u64 {
3553 self.0
3554 }
3555
3556 pub fn from_proto(id: u64) -> Self {
3557 RepositoryId(id)
3558 }
3559}
3560
3561impl RepositorySnapshot {
3562 fn empty(
3563 id: RepositoryId,
3564 work_directory_abs_path: Arc<Path>,
3565 original_repo_abs_path: Option<Arc<Path>>,
3566 path_style: PathStyle,
3567 ) -> Self {
3568 Self {
3569 id,
3570 statuses_by_path: Default::default(),
3571 original_repo_abs_path: original_repo_abs_path
3572 .unwrap_or_else(|| work_directory_abs_path.clone()),
3573 work_directory_abs_path,
3574 branch: None,
3575 head_commit: None,
3576 scan_id: 0,
3577 merge: Default::default(),
3578 remote_origin_url: None,
3579 remote_upstream_url: None,
3580 stash_entries: Default::default(),
3581 linked_worktrees: Arc::from([]),
3582 path_style,
3583 }
3584 }
3585
3586 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3587 proto::UpdateRepository {
3588 branch_summary: self.branch.as_ref().map(branch_to_proto),
3589 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3590 updated_statuses: self
3591 .statuses_by_path
3592 .iter()
3593 .map(|entry| entry.to_proto())
3594 .collect(),
3595 removed_statuses: Default::default(),
3596 current_merge_conflicts: self
3597 .merge
3598 .merge_heads_by_conflicted_path
3599 .iter()
3600 .map(|(repo_path, _)| repo_path.to_proto())
3601 .collect(),
3602 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3603 project_id,
3604 id: self.id.to_proto(),
3605 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3606 entry_ids: vec![self.id.to_proto()],
3607 scan_id: self.scan_id,
3608 is_last_update: true,
3609 stash_entries: self
3610 .stash_entries
3611 .entries
3612 .iter()
3613 .map(stash_to_proto)
3614 .collect(),
3615 remote_upstream_url: self.remote_upstream_url.clone(),
3616 remote_origin_url: self.remote_origin_url.clone(),
3617 original_repo_abs_path: Some(
3618 self.original_repo_abs_path.to_string_lossy().into_owned(),
3619 ),
3620 linked_worktrees: self
3621 .linked_worktrees
3622 .iter()
3623 .map(worktree_to_proto)
3624 .collect(),
3625 }
3626 }
3627
    /// Serializes the delta between `old` and `self` as an `UpdateRepository`
    /// message for a downstream client.
    ///
    /// Status entries are diffed with a two-pointer merge walk over the two
    /// path-ordered sequences: entries only in `self` become updates, entries
    /// only in `old` become removals, and entries in both are sent only when
    /// their status or diff stats changed.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Path only present in the new snapshot: updated.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Same path in both: only send if something changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Path only present in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3708
3709 pub fn linked_worktrees(&self) -> &[GitWorktree] {
3710 &self.linked_worktrees
3711 }
3712
3713 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3714 self.statuses_by_path.iter().cloned()
3715 }
3716
3717 pub fn status_summary(&self) -> GitSummary {
3718 self.statuses_by_path.summary().item_summary
3719 }
3720
3721 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3722 self.statuses_by_path
3723 .get(&PathKey(path.as_ref().clone()), ())
3724 .cloned()
3725 }
3726
3727 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
3728 self.statuses_by_path
3729 .get(&PathKey(path.as_ref().clone()), ())
3730 .and_then(|entry| entry.diff_stat)
3731 }
3732
    /// Converts an absolute filesystem path into a path relative to this
    /// repository's working directory, or `None` if it lies outside it.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }
3736
3737 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3738 self.path_style
3739 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3740 .unwrap()
3741 .into()
3742 }
3743
3744 #[inline]
3745 fn abs_path_to_repo_path_inner(
3746 work_directory_abs_path: &Path,
3747 abs_path: &Path,
3748 path_style: PathStyle,
3749 ) -> Option<RepoPath> {
3750 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3751 Some(RepoPath::from_rel_path(&rel_path))
3752 }
3753
3754 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3755 self.merge
3756 .merge_heads_by_conflicted_path
3757 .contains_key(repo_path)
3758 }
3759
3760 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3761 let had_conflict_on_last_merge_head_change = self
3762 .merge
3763 .merge_heads_by_conflicted_path
3764 .contains_key(repo_path);
3765 let has_conflict_currently = self
3766 .status_for_path(repo_path)
3767 .is_some_and(|entry| entry.status.is_conflicted());
3768 had_conflict_on_last_merge_head_change || has_conflict_currently
3769 }
3770
3771 /// This is the name that will be displayed in the repository selector for this repository.
3772 pub fn display_name(&self) -> SharedString {
3773 self.work_directory_abs_path
3774 .file_name()
3775 .unwrap_or_default()
3776 .to_string_lossy()
3777 .to_string()
3778 .into()
3779 }
3780}
3781
3782pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3783 proto::StashEntry {
3784 oid: entry.oid.as_bytes().to_vec(),
3785 message: entry.message.clone(),
3786 branch: entry.branch.clone(),
3787 index: entry.index as u64,
3788 timestamp: entry.timestamp,
3789 }
3790}
3791
3792pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3793 Ok(StashEntry {
3794 oid: Oid::from_bytes(&entry.oid)?,
3795 message: entry.message.clone(),
3796 index: entry.index as usize,
3797 branch: entry.branch.clone(),
3798 timestamp: entry.timestamp,
3799 })
3800}
3801
3802impl MergeDetails {
3803 async fn update(
3804 &mut self,
3805 backend: &Arc<dyn GitRepository>,
3806 current_conflicted_paths: Vec<RepoPath>,
3807 ) -> Result<bool> {
3808 log::debug!("load merge details");
3809 self.message = backend.merge_message().await.map(SharedString::from);
3810 let heads = backend
3811 .revparse_batch(vec![
3812 "MERGE_HEAD".into(),
3813 "CHERRY_PICK_HEAD".into(),
3814 "REBASE_HEAD".into(),
3815 "REVERT_HEAD".into(),
3816 "APPLY_HEAD".into(),
3817 ])
3818 .await
3819 .log_err()
3820 .unwrap_or_default()
3821 .into_iter()
3822 .map(|opt| opt.map(SharedString::from))
3823 .collect::<Vec<_>>();
3824
3825 let mut conflicts_changed = false;
3826
3827 // Record the merge state for newly conflicted paths
3828 for path in ¤t_conflicted_paths {
3829 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3830 conflicts_changed = true;
3831 self.merge_heads_by_conflicted_path
3832 .insert(path.clone(), heads.clone());
3833 }
3834 }
3835
3836 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3837 self.merge_heads_by_conflicted_path
3838 .retain(|path, old_merge_heads| {
3839 let keep = current_conflicted_paths.contains(path)
3840 || (old_merge_heads == &heads
3841 && old_merge_heads.iter().any(|head| head.is_some()));
3842 if !keep {
3843 conflicts_changed = true;
3844 }
3845 keep
3846 });
3847
3848 Ok(conflicts_changed)
3849 }
3850}
3851
3852impl Repository {
3853 pub fn is_trusted(&self) -> bool {
3854 match self.repository_state.peek() {
3855 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3856 _ => false,
3857 }
3858 }
3859
3860 pub fn snapshot(&self) -> RepositorySnapshot {
3861 self.snapshot.clone()
3862 }
3863
3864 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3865 self.pending_ops.iter().cloned()
3866 }
3867
3868 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3869 self.pending_ops.summary().clone()
3870 }
3871
3872 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3873 self.pending_ops
3874 .get(&PathKey(path.as_ref().clone()), ())
3875 .cloned()
3876 }
3877
    /// Constructs a repository handle backed by a local git repository on
    /// disk.
    ///
    /// The backend opens asynchronously: `repository_state` is a shared task
    /// that resolves when `LocalRepositoryState::new` completes, and a worker
    /// is spawned to execute git jobs against it serially.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Stringify the error so the `Shared` future's output is cloneable.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        // Wrap the local state in the generic `RepositoryState` enum.
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data when the branch changes,
        // skipping the initial scan (scan_id <= 1).
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3945
3946 fn remote(
3947 id: RepositoryId,
3948 work_directory_abs_path: Arc<Path>,
3949 original_repo_abs_path: Option<Arc<Path>>,
3950 path_style: PathStyle,
3951 project_id: ProjectId,
3952 client: AnyProtoClient,
3953 git_store: WeakEntity<GitStore>,
3954 cx: &mut Context<Self>,
3955 ) -> Self {
3956 let snapshot = RepositorySnapshot::empty(
3957 id,
3958 work_directory_abs_path,
3959 original_repo_abs_path,
3960 path_style,
3961 );
3962 let repository_state = RemoteRepositoryState { project_id, client };
3963 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3964 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3965 Self {
3966 this: cx.weak_entity(),
3967 snapshot,
3968 commit_message_buffer: None,
3969 git_store,
3970 pending_ops: Default::default(),
3971 paths_needing_status_update: Default::default(),
3972 job_sender,
3973 repository_state,
3974 askpass_delegates: Default::default(),
3975 latest_askpass_id: 0,
3976 active_jobs: Default::default(),
3977 job_id: 0,
3978 initial_graph_data: Default::default(),
3979 commit_data: Default::default(),
3980 graph_commit_data_handler: GraphCommitHandlerState::Closed,
3981 }
3982 }
3983
3984 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3985 self.git_store.upgrade()
3986 }
3987
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, feeding any changes into the buffers' diff state
    /// and forwarding them to a downstream client when one is connected.
    ///
    /// Runs as a keyed job (`GitJobKey::ReloadBufferDiffBases`) so repeated
    /// requests coalesce onto one in-flight reload.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Collect, for each open buffer in this repo, which base
                // texts (index / head) are actually in use by live diffs.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // On the background executor: load fresh base texts from git
                // and compute which bases changed for each buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            // Only load texts that some live diff is using.
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Apply the changes locally and mirror them downstream.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4147
    /// Enqueues an un-keyed job on this repository's serialized git worker.
    ///
    /// `status`, when provided, is surfaced as an in-progress message while
    /// the job runs. The returned receiver yields the job's result, or is
    /// canceled if the worker shuts down first.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4160
    /// Enqueues `job` on this repository's job channel and returns a oneshot
    /// receiver that resolves with the job's output.
    ///
    /// While the job runs, a `JobInfo` (start time plus the `status` message)
    /// is registered in `active_jobs` under a freshly allocated id so the UI
    /// can show progress, and removed when the job finishes; `cx.notify()`
    /// fires on both transitions. If the job is dropped without running, the
    /// receiver resolves with `Canceled`. The optional `key` travels with the
    /// `GitJob`; it is interpreted by the consumer of `job_sender`, not here.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job as active (with its status message)
                        // before awaiting it, so observers see it in flight.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Unregister regardless of the job's outcome.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The caller may have dropped the receiver; ignore send errors.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4210
4211 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4212 let Some(git_store) = self.git_store.upgrade() else {
4213 return;
4214 };
4215 let entity = cx.entity();
4216 git_store.update(cx, |git_store, cx| {
4217 let Some((&id, _)) = git_store
4218 .repositories
4219 .iter()
4220 .find(|(_, handle)| *handle == &entity)
4221 else {
4222 return;
4223 };
4224 git_store.active_repo_id = Some(id);
4225 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4226 });
4227 }
4228
    /// Returns the status entries recorded in this repository's snapshot,
    /// without consulting git or the filesystem.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4232
    /// Returns the snapshot's cached diff statistics for `path`, if any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4236
    /// Returns a clone of the stash entries recorded in the snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4240
4241 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4242 let git_store = self.git_store.upgrade()?;
4243 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4244 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4245 let abs_path = SanitizedPath::new(&abs_path);
4246 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4247 Some(ProjectPath {
4248 worktree_id: worktree.read(cx).id(),
4249 path: relative_path,
4250 })
4251 }
4252
4253 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4254 let git_store = self.git_store.upgrade()?;
4255 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4256 let abs_path = worktree_store.absolutize(path, cx)?;
4257 self.snapshot.abs_path_to_repo_path(&abs_path)
4258 }
4259
4260 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4261 other
4262 .read(cx)
4263 .snapshot
4264 .work_directory_abs_path
4265 .starts_with(&self.snapshot.work_directory_abs_path)
4266 }
4267
    /// Returns the buffer used to compose this repository's commit message,
    /// creating it on first use (later calls return the cached
    /// `commit_message_buffer`).
    ///
    /// Locally the buffer is created through the `BufferStore`; for remote
    /// repositories an `OpenCommitMessageBuffer` request is sent, the
    /// replicated buffer is awaited, tagged with the "Git Commit" language
    /// when `languages` is provided, and then cached.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the buffer created by a previous call.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait for the host's buffer to be replicated to us.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4320
4321 fn open_local_commit_buffer(
4322 language_registry: Option<Arc<LanguageRegistry>>,
4323 buffer_store: Entity<BufferStore>,
4324 cx: &mut Context<Self>,
4325 ) -> Task<Result<Entity<Buffer>>> {
4326 cx.spawn(async move |repository, cx| {
4327 let git_commit_language = match language_registry {
4328 Some(language_registry) => {
4329 Some(language_registry.language_for_name("Git Commit").await?)
4330 }
4331 None => None,
4332 };
4333 let buffer = buffer_store
4334 .update(cx, |buffer_store, cx| {
4335 buffer_store.create_buffer(git_commit_language, false, cx)
4336 })
4337 .await?;
4338
4339 repository.update(cx, |repository, _| {
4340 repository.commit_message_buffer = Some(buffer.clone());
4341 })?;
4342 Ok(buffer)
4343 })
4344 }
4345
    /// Restores `paths` to their content at `commit` (the moral equivalent of
    /// `git checkout <commit> -- <paths>`), tracking the affected paths as
    /// pending "reverted" operations while the job runs.
    ///
    /// For remote repositories the operation is forwarded as a
    /// `GitCheckoutFiles` request.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4400
4401 pub fn reset(
4402 &mut self,
4403 commit: String,
4404 reset_mode: ResetMode,
4405 _cx: &mut App,
4406 ) -> oneshot::Receiver<Result<()>> {
4407 let id = self.id;
4408
4409 self.send_job(None, move |git_repo, _| async move {
4410 match git_repo {
4411 RepositoryState::Local(LocalRepositoryState {
4412 backend,
4413 environment,
4414 ..
4415 }) => backend.reset(commit, reset_mode, environment).await,
4416 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4417 client
4418 .request(proto::GitReset {
4419 project_id: project_id.0,
4420 repository_id: id.to_proto(),
4421 commit,
4422 mode: match reset_mode {
4423 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4424 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4425 },
4426 })
4427 .await?;
4428
4429 Ok(())
4430 }
4431 }
4432 })
4433 }
4434
4435 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4436 let id = self.id;
4437 self.send_job(None, move |git_repo, _cx| async move {
4438 match git_repo {
4439 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4440 backend.show(commit).await
4441 }
4442 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4443 let resp = client
4444 .request(proto::GitShow {
4445 project_id: project_id.0,
4446 repository_id: id.to_proto(),
4447 commit,
4448 })
4449 .await?;
4450
4451 Ok(CommitDetails {
4452 sha: resp.sha.into(),
4453 message: resp.message.into(),
4454 commit_timestamp: resp.commit_timestamp,
4455 author_email: resp.author_email.into(),
4456 author_name: resp.author_name.into(),
4457 })
4458 }
4459 }
4460 })
4461 }
4462
    /// Loads the diff introduced by `commit` — per-file old/new text plus a
    /// binary flag — from the local backend, or via a `LoadCommitDiff`
    /// request for remote repositories.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    // Rebuild the diff from the wire format; path decoding is
                    // fallible, so collect into a Result.
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4498
    /// Loads the full commit history touching `path` — unpaginated
    /// convenience wrapper over [`Self::file_history_paginated`] with
    /// `skip = 0` and no limit.
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4505
    /// Loads a page of the commit history touching `path`, skipping the
    /// first `skip` entries and returning at most `limit` entries when a
    /// limit is given. Remote repositories are served via a `GitFileHistory`
    /// request.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    // Rehydrate the history entries from the wire format.
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4547
    /// Returns the graph data already cached for `(log_source, log_order)`,
    /// if [`Self::graph_data`] has been called for that combination; never
    /// triggers a fetch.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4555
    /// Returns the commits in `range` of the graph for
    /// `(log_source, log_order)`, starting a background fetch on first
    /// access for that combination.
    ///
    /// The returned slice is clamped to the commits loaded so far, so while
    /// `is_loading` is true callers may receive fewer commits than
    /// requested. If the fetch fails, the error is recorded on the cached
    /// entry and surfaced via `GraphDataResponse::error`.
    ///
    /// NOTE(review): with the clamp `start.min(len - 1)..end.min(len)`, a
    /// requested range entirely past the loaded data (e.g. `10..10` with 5
    /// commits loaded) yields one trailing element rather than an empty
    /// slice — confirm this is intended.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // First access for this (source, order): kick off the fetch.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record a fetch failure on the cached entry so the UI can
                    // display it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the data loaded so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4625
    /// Streams the initial graph data for a local repository: the backend
    /// produces batches of commits on the background executor, and each
    /// batch is appended to the matching `initial_graph_data` entry (keeping
    /// `commit_oid_to_index` in sync) while
    /// `GitGraphEvent::CountUpdated` is emitted per commit.
    ///
    /// Resolves once the backend finishes, propagating any backend error as
    /// a `SharedString`.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // Produce commit batches off the main thread.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Consume batches as they arrive and fold them into the cached entry.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                // The entry is created before this task is spawned; a vacant
                // entry here means the invariant was violated.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4680
    /// Returns the cached state of commit `sha`'s data, requesting a load if
    /// it is not cached yet.
    ///
    /// If the graph-commit handler is open, the sha is queued on it and
    /// marked `Loading`. If the handler is closed, it is (re)started but the
    /// sha is NOT queued this call — a later call will queue it once the
    /// handler is open. In every not-yet-loaded case the caller sees
    /// `CommitDataState::Loading`.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark as Loading if the request was actually queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        // Fall back to Loading when the sha hasn't been inserted yet.
        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4701
    /// Spins up the commit-data handler: a background task that reads commit
    /// data for requested shas from the local backend, and a foreground task
    /// that stores each result into `commit_data` (notifying observers).
    ///
    /// The background loop exits after 10 seconds without a request, when
    /// the request channel closes, or when results can no longer be
    /// delivered; dropping `result_tx` then ends the foreground loop, which
    /// marks the handler `Closed` so a later `fetch_commit_data` can restart
    /// it. Remote repositories are not supported — the handler logs and
    /// exits.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        // Foreground: apply results to `commit_data` until the channel closes.
        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Reader is gone; allow a future call to restart the handler.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        // Background: service sha requests against the repository backend.
        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: shut the reader down after 10s without work.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task finish and
            // mark the handler Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4792
    /// Returns the parent git store's `BufferStore`, or `None` if the git
    /// store has been dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4796
    /// Collects save tasks for every open buffer among `entries` that has
    /// unsaved edits and whose file still exists on disk. Paths with no
    /// project path, no open buffer, a deleted file, or no unsaved edits are
    /// skipped. Returns the (possibly empty) list of save tasks.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    // Only save dirty buffers whose backing file still exists.
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
4823
    /// Stages the given paths; see [`Self::stage_or_unstage_entries`].
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4831
    /// Unstages the given paths; see [`Self::stage_or_unstage_entries`].
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4839
    /// Stages (`stage == true`) or unstages the given paths.
    ///
    /// Dirty buffers for the affected paths are saved first so the index
    /// operation sees their current contents. Before the actual
    /// `git add` / `git reset` runs, every open uncommitted diff for those
    /// paths is optimistically updated (all hunks marked staged/unstaged)
    /// and its `hunk_staging_operation_count` bumped; after the write, the
    /// recorded counts are committed on success or the pending hunk state is
    /// cleared on failure. The job is keyed by
    /// `GitJobKey::WriteIndex(entries)` and the paths are tracked as pending
    /// staged/unstaged operations for the duration.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        // Save dirty buffers before touching the index.
        let save_tasks = self.save_buffers(&entries, cx);
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip hunk state in every open
                            // uncommitted diff for the affected paths, and
                            // remember each diff's operation count so we can
                            // reconcile after the write.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write, locally or via RPC.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic updates: commit the
                            // counts on success, roll back pending hunks on
                            // failure.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5024
5025 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5026 let snapshot = self.snapshot.clone();
5027 let pending_ops = self.pending_ops.clone();
5028 let to_stage = cx.background_spawn(async move {
5029 snapshot
5030 .status()
5031 .filter_map(|entry| {
5032 if let Some(ops) =
5033 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5034 {
5035 if ops.staging() || ops.staged() {
5036 None
5037 } else {
5038 Some(entry.repo_path)
5039 }
5040 } else if entry.status.staging().is_fully_staged() {
5041 None
5042 } else {
5043 Some(entry.repo_path)
5044 }
5045 })
5046 .collect()
5047 });
5048
5049 cx.spawn(async move |this, cx| {
5050 let to_stage = to_stage.await;
5051 this.update(cx, |this, cx| {
5052 this.stage_or_unstage_entries(true, to_stage, cx)
5053 })?
5054 .await
5055 })
5056 }
5057
5058 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5059 let snapshot = self.snapshot.clone();
5060 let pending_ops = self.pending_ops.clone();
5061 let to_unstage = cx.background_spawn(async move {
5062 snapshot
5063 .status()
5064 .filter_map(|entry| {
5065 if let Some(ops) =
5066 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5067 {
5068 if !ops.staging() && !ops.staged() {
5069 None
5070 } else {
5071 Some(entry.repo_path)
5072 }
5073 } else if entry.status.staging().is_fully_unstaged() {
5074 None
5075 } else {
5076 Some(entry.repo_path)
5077 }
5078 })
5079 .collect()
5080 });
5081
5082 cx.spawn(async move |this, cx| {
5083 let to_unstage = to_unstage.await;
5084 this.update(cx, |this, cx| {
5085 this.stage_or_unstage_entries(false, to_unstage, cx)
5086 })?
5087 .await
5088 })
5089 }
5090
5091 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5092 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5093
5094 self.stash_entries(to_stash, cx)
5095 }
5096
    /// Stashes the given paths via the local backend, or a `proto::Stash`
    /// request when the repository lives on a remote collaborator.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5133
    /// Pops the stash entry at `index` (the backend's default entry when
    /// `None`), locally or via a `StashPop` request when remote.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5167
    /// Applies the stash entry at `index` (the backend's default entry when
    /// `None`) without dropping it, locally or via a `StashApply` request
    /// when remote.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5201
5202 pub fn stash_drop(
5203 &mut self,
5204 index: Option<usize>,
5205 cx: &mut Context<Self>,
5206 ) -> oneshot::Receiver<anyhow::Result<()>> {
5207 let id = self.id;
5208 let updates_tx = self
5209 .git_store()
5210 .and_then(|git_store| match &git_store.read(cx).state {
5211 GitStoreState::Local { downstream, .. } => downstream
5212 .as_ref()
5213 .map(|downstream| downstream.updates_tx.clone()),
5214 _ => None,
5215 });
5216 let this = cx.weak_entity();
5217 self.send_job(None, move |git_repo, mut cx| async move {
5218 match git_repo {
5219 RepositoryState::Local(LocalRepositoryState {
5220 backend,
5221 environment,
5222 ..
5223 }) => {
5224 // TODO would be nice to not have to do this manually
5225 let result = backend.stash_drop(index, environment).await;
5226 if result.is_ok()
5227 && let Ok(stash_entries) = backend.stash_entries().await
5228 {
5229 let snapshot = this.update(&mut cx, |this, cx| {
5230 this.snapshot.stash_entries = stash_entries;
5231 cx.emit(RepositoryEvent::StashEntriesChanged);
5232 this.snapshot.clone()
5233 })?;
5234 if let Some(updates_tx) = updates_tx {
5235 updates_tx
5236 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5237 .ok();
5238 }
5239 }
5240
5241 result
5242 }
5243 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5244 client
5245 .request(proto::StashDrop {
5246 project_id: project_id.0,
5247 repository_id: id.to_proto(),
5248 stash_index: index.map(|i| i as u64),
5249 })
5250 .await
5251 .context("sending stash pop request")?;
5252 Ok(())
5253 }
5254 }
5255 })
5256 }
5257
    /// Runs the given git hook, locally through the backend or via a
    /// `RunGitHook` request when the repository lives on a remote
    /// collaborator. The hook name is shown as the job's status message.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5284
    /// Creates a commit with `message` and the given options, after running
    /// the pre-commit hook (the commit job awaits the hook's result and
    /// aborts on failure).
    ///
    /// Locally the backend performs the commit; remotely a `proto::Commit`
    /// request is sent, with the `askpass` delegate registered under a fresh
    /// id for the duration of the request so credential prompts can be
    /// routed back.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook ahead of the commit job.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the pre-commit hook failed or was canceled.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always unregister the delegate when the request ends.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5339
    /// Runs `git fetch` with the given options, returning the command's
    /// stdout/stderr.
    ///
    /// Locally the backend performs the fetch; remotely a `proto::Fetch`
    /// request is sent, with the `askpass` delegate registered under a fresh
    /// id for the duration of the request so credential prompts can be
    /// routed back.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always unregister the delegate when the request ends.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5381
5382 pub fn push(
5383 &mut self,
5384 branch: SharedString,
5385 remote_branch: SharedString,
5386 remote: SharedString,
5387 options: Option<PushOptions>,
5388 askpass: AskPassDelegate,
5389 cx: &mut Context<Self>,
5390 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5391 let askpass_delegates = self.askpass_delegates.clone();
5392 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5393 let id = self.id;
5394
5395 let args = options
5396 .map(|option| match option {
5397 PushOptions::SetUpstream => " --set-upstream",
5398 PushOptions::Force => " --force-with-lease",
5399 })
5400 .unwrap_or("");
5401
5402 let updates_tx = self
5403 .git_store()
5404 .and_then(|git_store| match &git_store.read(cx).state {
5405 GitStoreState::Local { downstream, .. } => downstream
5406 .as_ref()
5407 .map(|downstream| downstream.updates_tx.clone()),
5408 _ => None,
5409 });
5410
5411 let this = cx.weak_entity();
5412 self.send_job(
5413 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5414 move |git_repo, mut cx| async move {
5415 match git_repo {
5416 RepositoryState::Local(LocalRepositoryState {
5417 backend,
5418 environment,
5419 ..
5420 }) => {
5421 let result = backend
5422 .push(
5423 branch.to_string(),
5424 remote_branch.to_string(),
5425 remote.to_string(),
5426 options,
5427 askpass,
5428 environment.clone(),
5429 cx.clone(),
5430 )
5431 .await;
5432 // TODO would be nice to not have to do this manually
5433 if result.is_ok() {
5434 let branches = backend.branches().await?;
5435 let branch = branches.into_iter().find(|branch| branch.is_head);
5436 log::info!("head branch after scan is {branch:?}");
5437 let snapshot = this.update(&mut cx, |this, cx| {
5438 this.snapshot.branch = branch;
5439 cx.emit(RepositoryEvent::BranchChanged);
5440 this.snapshot.clone()
5441 })?;
5442 if let Some(updates_tx) = updates_tx {
5443 updates_tx
5444 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5445 .ok();
5446 }
5447 }
5448 result
5449 }
5450 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5451 askpass_delegates.lock().insert(askpass_id, askpass);
5452 let _defer = util::defer(|| {
5453 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5454 debug_assert!(askpass_delegate.is_some());
5455 });
5456 let response = client
5457 .request(proto::Push {
5458 project_id: project_id.0,
5459 repository_id: id.to_proto(),
5460 askpass_id,
5461 branch_name: branch.to_string(),
5462 remote_branch_name: remote_branch.to_string(),
5463 remote_name: remote.to_string(),
5464 options: options.map(|options| match options {
5465 PushOptions::Force => proto::push::PushOptions::Force,
5466 PushOptions::SetUpstream => {
5467 proto::push::PushOptions::SetUpstream
5468 }
5469 }
5470 as i32),
5471 })
5472 .await?;
5473
5474 Ok(RemoteCommandOutput {
5475 stdout: response.stdout,
5476 stderr: response.stderr,
5477 })
5478 }
5479 }
5480 },
5481 )
5482 }
5483
5484 pub fn pull(
5485 &mut self,
5486 branch: Option<SharedString>,
5487 remote: SharedString,
5488 rebase: bool,
5489 askpass: AskPassDelegate,
5490 _cx: &mut App,
5491 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5492 let askpass_delegates = self.askpass_delegates.clone();
5493 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5494 let id = self.id;
5495
5496 let mut status = "git pull".to_string();
5497 if rebase {
5498 status.push_str(" --rebase");
5499 }
5500 status.push_str(&format!(" {}", remote));
5501 if let Some(b) = &branch {
5502 status.push_str(&format!(" {}", b));
5503 }
5504
5505 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5506 match git_repo {
5507 RepositoryState::Local(LocalRepositoryState {
5508 backend,
5509 environment,
5510 ..
5511 }) => {
5512 backend
5513 .pull(
5514 branch.as_ref().map(|b| b.to_string()),
5515 remote.to_string(),
5516 rebase,
5517 askpass,
5518 environment.clone(),
5519 cx,
5520 )
5521 .await
5522 }
5523 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5524 askpass_delegates.lock().insert(askpass_id, askpass);
5525 let _defer = util::defer(|| {
5526 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5527 debug_assert!(askpass_delegate.is_some());
5528 });
5529 let response = client
5530 .request(proto::Pull {
5531 project_id: project_id.0,
5532 repository_id: id.to_proto(),
5533 askpass_id,
5534 rebase,
5535 branch_name: branch.as_ref().map(|b| b.to_string()),
5536 remote_name: remote.to_string(),
5537 })
5538 .await?;
5539
5540 Ok(RemoteCommandOutput {
5541 stdout: response.stdout,
5542 stderr: response.stderr,
5543 })
5544 }
5545 }
5546 })
5547 }
5548
    /// Queues a job that writes `content` into the git index for `path`
    /// (`None` content removes the entry).
    ///
    /// The job is keyed by `GitJobKey::WriteIndex(path)`; the worker skips a
    /// keyed job when a newer one with the same key is already queued, so only
    /// the latest pending write for a path actually runs.
    ///
    /// If `hunk_staging_operation_count` is provided, it is recorded on the
    /// buffer's diff state after the write completes.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the file's executable bit in the index;
                        // a missing file or metadata error falls back to
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    // Record the operation count on the open buffer's diff
                    // state; a missing buffer or diff state is not an error
                    // (the `?`s inside the closure just short-circuit to None).
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5627
5628 pub fn create_remote(
5629 &mut self,
5630 remote_name: String,
5631 remote_url: String,
5632 ) -> oneshot::Receiver<Result<()>> {
5633 let id = self.id;
5634 self.send_job(
5635 Some(format!("git remote add {remote_name} {remote_url}").into()),
5636 move |repo, _cx| async move {
5637 match repo {
5638 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5639 backend.create_remote(remote_name, remote_url).await
5640 }
5641 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5642 client
5643 .request(proto::GitCreateRemote {
5644 project_id: project_id.0,
5645 repository_id: id.to_proto(),
5646 remote_name,
5647 remote_url,
5648 })
5649 .await?;
5650
5651 Ok(())
5652 }
5653 }
5654 },
5655 )
5656 }
5657
5658 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5659 let id = self.id;
5660 self.send_job(
5661 Some(format!("git remove remote {remote_name}").into()),
5662 move |repo, _cx| async move {
5663 match repo {
5664 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5665 backend.remove_remote(remote_name).await
5666 }
5667 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5668 client
5669 .request(proto::GitRemoveRemote {
5670 project_id: project_id.0,
5671 repository_id: id.to_proto(),
5672 remote_name,
5673 })
5674 .await?;
5675
5676 Ok(())
5677 }
5678 }
5679 },
5680 )
5681 }
5682
5683 pub fn get_remotes(
5684 &mut self,
5685 branch_name: Option<String>,
5686 is_push: bool,
5687 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5688 let id = self.id;
5689 self.send_job(None, move |repo, _cx| async move {
5690 match repo {
5691 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5692 let remote = if let Some(branch_name) = branch_name {
5693 if is_push {
5694 backend.get_push_remote(branch_name).await?
5695 } else {
5696 backend.get_branch_remote(branch_name).await?
5697 }
5698 } else {
5699 None
5700 };
5701
5702 match remote {
5703 Some(remote) => Ok(vec![remote]),
5704 None => backend.get_all_remotes().await,
5705 }
5706 }
5707 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5708 let response = client
5709 .request(proto::GetRemotes {
5710 project_id: project_id.0,
5711 repository_id: id.to_proto(),
5712 branch_name,
5713 is_push,
5714 })
5715 .await?;
5716
5717 let remotes = response
5718 .remotes
5719 .into_iter()
5720 .map(|remotes| Remote {
5721 name: remotes.name.into(),
5722 })
5723 .collect();
5724
5725 Ok(remotes)
5726 }
5727 }
5728 })
5729 }
5730
5731 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5732 let id = self.id;
5733 self.send_job(None, move |repo, _| async move {
5734 match repo {
5735 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5736 backend.branches().await
5737 }
5738 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5739 let response = client
5740 .request(proto::GitGetBranches {
5741 project_id: project_id.0,
5742 repository_id: id.to_proto(),
5743 })
5744 .await?;
5745
5746 let branches = response
5747 .branches
5748 .into_iter()
5749 .map(|branch| proto_to_branch(&branch))
5750 .collect();
5751
5752 Ok(branches)
5753 }
5754 }
5755 })
5756 }
5757
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree's working directory differs from the repository's
        // original checkout path.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5769
5770 pub fn path_for_new_linked_worktree(
5771 &self,
5772 branch_name: &str,
5773 worktree_directory_setting: &str,
5774 ) -> Result<PathBuf> {
5775 let original_repo = self.original_repo_abs_path.clone();
5776 let project_name = original_repo
5777 .file_name()
5778 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5779 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5780 Ok(directory.join(branch_name).join(project_name))
5781 }
5782
5783 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5784 let id = self.id;
5785 self.send_job(None, move |repo, _| async move {
5786 match repo {
5787 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5788 backend.worktrees().await
5789 }
5790 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5791 let response = client
5792 .request(proto::GitGetWorktrees {
5793 project_id: project_id.0,
5794 repository_id: id.to_proto(),
5795 })
5796 .await?;
5797
5798 let worktrees = response
5799 .worktrees
5800 .into_iter()
5801 .map(|worktree| proto_to_worktree(&worktree))
5802 .collect();
5803
5804 Ok(worktrees)
5805 }
5806 }
5807 })
5808 }
5809
5810 pub fn create_worktree(
5811 &mut self,
5812 branch_name: String,
5813 path: PathBuf,
5814 commit: Option<String>,
5815 ) -> oneshot::Receiver<Result<()>> {
5816 let id = self.id;
5817 self.send_job(
5818 Some(format!("git worktree add: {}", branch_name).into()),
5819 move |repo, _cx| async move {
5820 match repo {
5821 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5822 backend.create_worktree(branch_name, path, commit).await
5823 }
5824 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5825 client
5826 .request(proto::GitCreateWorktree {
5827 project_id: project_id.0,
5828 repository_id: id.to_proto(),
5829 name: branch_name,
5830 directory: path.to_string_lossy().to_string(),
5831 commit,
5832 })
5833 .await?;
5834
5835 Ok(())
5836 }
5837 }
5838 },
5839 )
5840 }
5841
5842 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5843 let id = self.id;
5844 self.send_job(
5845 Some(format!("git worktree remove: {}", path.display()).into()),
5846 move |repo, _cx| async move {
5847 match repo {
5848 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5849 backend.remove_worktree(path, force).await
5850 }
5851 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5852 client
5853 .request(proto::GitRemoveWorktree {
5854 project_id: project_id.0,
5855 repository_id: id.to_proto(),
5856 path: path.to_string_lossy().to_string(),
5857 force,
5858 })
5859 .await?;
5860
5861 Ok(())
5862 }
5863 }
5864 },
5865 )
5866 }
5867
5868 pub fn rename_worktree(
5869 &mut self,
5870 old_path: PathBuf,
5871 new_path: PathBuf,
5872 ) -> oneshot::Receiver<Result<()>> {
5873 let id = self.id;
5874 self.send_job(
5875 Some(format!("git worktree move: {}", old_path.display()).into()),
5876 move |repo, _cx| async move {
5877 match repo {
5878 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5879 backend.rename_worktree(old_path, new_path).await
5880 }
5881 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5882 client
5883 .request(proto::GitRenameWorktree {
5884 project_id: project_id.0,
5885 repository_id: id.to_proto(),
5886 old_path: old_path.to_string_lossy().to_string(),
5887 new_path: new_path.to_string_lossy().to_string(),
5888 })
5889 .await?;
5890
5891 Ok(())
5892 }
5893 }
5894 },
5895 )
5896 }
5897
5898 pub fn default_branch(
5899 &mut self,
5900 include_remote_name: bool,
5901 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5902 let id = self.id;
5903 self.send_job(None, move |repo, _| async move {
5904 match repo {
5905 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5906 backend.default_branch(include_remote_name).await
5907 }
5908 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5909 let response = client
5910 .request(proto::GetDefaultBranch {
5911 project_id: project_id.0,
5912 repository_id: id.to_proto(),
5913 })
5914 .await?;
5915
5916 anyhow::Ok(response.branch.map(SharedString::from))
5917 }
5918 }
5919 })
5920 }
5921
    /// Computes a tree-level diff (per-path statuses, no file contents).
    ///
    /// On the remote path the proto entries are converted back into
    /// `TreeDiffStatus` values; entries with an unparsable oid or path are
    /// logged and dropped instead of failing the whole diff.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                // Added entries carry no prior oid.
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
5981
5982 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5983 let id = self.id;
5984 self.send_job(None, move |repo, _cx| async move {
5985 match repo {
5986 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5987 backend.diff(diff_type).await
5988 }
5989 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5990 let (proto_diff_type, merge_base_ref) = match &diff_type {
5991 DiffType::HeadToIndex => {
5992 (proto::git_diff::DiffType::HeadToIndex.into(), None)
5993 }
5994 DiffType::HeadToWorktree => {
5995 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
5996 }
5997 DiffType::MergeBase { base_ref } => (
5998 proto::git_diff::DiffType::MergeBase.into(),
5999 Some(base_ref.to_string()),
6000 ),
6001 };
6002 let response = client
6003 .request(proto::GitDiff {
6004 project_id: project_id.0,
6005 repository_id: id.to_proto(),
6006 diff_type: proto_diff_type,
6007 merge_base_ref,
6008 })
6009 .await?;
6010
6011 Ok(response.diff)
6012 }
6013 }
6014 })
6015 }
6016
    /// Creates `branch_name` (optionally from `base_branch`) and switches to
    /// it, mirroring `git switch -c`.
    ///
    /// NOTE(review): on the remote path `base_branch` is not sent in the
    /// `GitCreateBranch` request, so the remote host presumably branches from
    /// its current HEAD even when a base was specified — confirm against the
    /// proto definition and server handler.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6047
6048 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6049 let id = self.id;
6050 self.send_job(
6051 Some(format!("git switch {branch_name}").into()),
6052 move |repo, _cx| async move {
6053 match repo {
6054 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6055 backend.change_branch(branch_name).await
6056 }
6057 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6058 client
6059 .request(proto::GitChangeBranch {
6060 project_id: project_id.0,
6061 repository_id: id.to_proto(),
6062 branch_name,
6063 })
6064 .await?;
6065
6066 Ok(())
6067 }
6068 }
6069 },
6070 )
6071 }
6072
6073 pub fn delete_branch(
6074 &mut self,
6075 is_remote: bool,
6076 branch_name: String,
6077 ) -> oneshot::Receiver<Result<()>> {
6078 let id = self.id;
6079 self.send_job(
6080 Some(
6081 format!(
6082 "git branch {} {}",
6083 if is_remote { "-dr" } else { "-d" },
6084 branch_name
6085 )
6086 .into(),
6087 ),
6088 move |repo, _cx| async move {
6089 match repo {
6090 RepositoryState::Local(state) => {
6091 state.backend.delete_branch(is_remote, branch_name).await
6092 }
6093 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6094 client
6095 .request(proto::GitDeleteBranch {
6096 project_id: project_id.0,
6097 repository_id: id.to_proto(),
6098 is_remote,
6099 branch_name,
6100 })
6101 .await?;
6102
6103 Ok(())
6104 }
6105 }
6106 },
6107 )
6108 }
6109
6110 pub fn rename_branch(
6111 &mut self,
6112 branch: String,
6113 new_name: String,
6114 ) -> oneshot::Receiver<Result<()>> {
6115 let id = self.id;
6116 self.send_job(
6117 Some(format!("git branch -m {branch} {new_name}").into()),
6118 move |repo, _cx| async move {
6119 match repo {
6120 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6121 backend.rename_branch(branch, new_name).await
6122 }
6123 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6124 client
6125 .request(proto::GitRenameBranch {
6126 project_id: project_id.0,
6127 repository_id: id.to_proto(),
6128 branch,
6129 new_name,
6130 })
6131 .await?;
6132
6133 Ok(())
6134 }
6135 }
6136 },
6137 )
6138 }
6139
6140 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6141 let id = self.id;
6142 self.send_job(None, move |repo, _cx| async move {
6143 match repo {
6144 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6145 backend.check_for_pushed_commit().await
6146 }
6147 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6148 let response = client
6149 .request(proto::CheckForPushedCommits {
6150 project_id: project_id.0,
6151 repository_id: id.to_proto(),
6152 })
6153 .await?;
6154
6155 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6156
6157 Ok(branches)
6158 }
6159 }
6160 })
6161 }
6162
6163 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6164 self.send_job(None, |repo, _cx| async move {
6165 match repo {
6166 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6167 backend.checkpoint().await
6168 }
6169 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6170 }
6171 })
6172 }
6173
6174 pub fn restore_checkpoint(
6175 &mut self,
6176 checkpoint: GitRepositoryCheckpoint,
6177 ) -> oneshot::Receiver<Result<()>> {
6178 self.send_job(None, move |repo, _cx| async move {
6179 match repo {
6180 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6181 backend.restore_checkpoint(checkpoint).await
6182 }
6183 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6184 }
6185 })
6186 }
6187
    /// Applies a repository state update received from the upstream (host)
    /// side of a collaboration session, emitting a change event for each
    /// piece of state that actually differs from the local snapshot.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch / HEAD: emit a single event if either changed.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to deserialize are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Apply status removals and upserts as one batch of sum-tree edits;
        // entries that fail to parse are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only adopt the scan id once the final chunk of the update arrives.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6271
6272 pub fn compare_checkpoints(
6273 &mut self,
6274 left: GitRepositoryCheckpoint,
6275 right: GitRepositoryCheckpoint,
6276 ) -> oneshot::Receiver<Result<bool>> {
6277 self.send_job(None, move |repo, _cx| async move {
6278 match repo {
6279 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6280 backend.compare_checkpoints(left, right).await
6281 }
6282 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6283 }
6284 })
6285 }
6286
6287 pub fn diff_checkpoints(
6288 &mut self,
6289 base_checkpoint: GitRepositoryCheckpoint,
6290 target_checkpoint: GitRepositoryCheckpoint,
6291 ) -> oneshot::Receiver<Result<String>> {
6292 self.send_job(None, move |repo, _cx| async move {
6293 match repo {
6294 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6295 backend
6296 .diff_checkpoints(base_checkpoint, target_checkpoint)
6297 .await
6298 }
6299 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6300 }
6301 })
6302 }
6303
    /// Drops all pending ops that are no longer running, keeping only
    /// in-flight ones, and notifies observers when anything was removed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree keeping only entries that still have at least one
        // running op.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *previous* set of pending
            // ops (cloned before the assignment below), not `updated` —
            // confirm subscribers expect the pre-update snapshot.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6329
    /// Queues a git status rescan. The job is keyed by
    /// `GitJobKey::ReloadGitState`, so the worker skips a queued scan when a
    /// newer one is already waiting behind it.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // A dropped repository entity just means there's nothing to scan.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // NOTE(review): the result of this update is discarded (no `?`
                // or `log_err`) — confirm ignoring a failed update here is
                // intentional.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                // Forward the fresh snapshot to any downstream (collab) clients.
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6361
    /// Spawns the background worker that services git jobs for a local
    /// repository, returning the channel on which jobs are submitted.
    ///
    /// Jobs run one at a time. Keyed jobs are coalesced: a popped job is
    /// skipped when a newer job with the same key is already queued behind
    /// it, so only the most recent job per key executes.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // servicing any jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so we can look ahead for
                // newer jobs with the same key.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a newer one with the same key is
                    // already waiting behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; worker exits.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6407
    /// Spawns the background worker that services git jobs for a remote
    /// (collaboration) repository; mirrors `spawn_local_git_worker` minus the
    /// backend initialization and hosting-provider registration.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued to enable key-based
                // lookahead below.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Coalesce keyed jobs: skip this one when a newer job with
                    // the same key is already queued behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; worker exits.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6443
6444 fn load_staged_text(
6445 &mut self,
6446 buffer_id: BufferId,
6447 repo_path: RepoPath,
6448 cx: &App,
6449 ) -> Task<Result<Option<String>>> {
6450 let rx = self.send_job(None, move |state, _| async move {
6451 match state {
6452 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6453 anyhow::Ok(backend.load_index_text(repo_path).await)
6454 }
6455 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6456 let response = client
6457 .request(proto::OpenUnstagedDiff {
6458 project_id: project_id.to_proto(),
6459 buffer_id: buffer_id.to_proto(),
6460 })
6461 .await?;
6462 Ok(response.staged_text)
6463 }
6464 }
6465 });
6466 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6467 }
6468
    /// Loads the committed (HEAD) and staged (index) texts used as diff bases
    /// for `repo_path`.
    ///
    /// When the two are identical a single `SetBoth` is produced; otherwise
    /// each base is carried separately via `SetEach`. The remote path encodes
    /// the same distinction in the response's `Mode`.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6514
6515 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6516 let repository_id = self.snapshot.id;
6517 let rx = self.send_job(None, move |state, _| async move {
6518 match state {
6519 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6520 backend.load_blob_content(oid).await
6521 }
6522 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6523 let response = client
6524 .request(proto::GetBlobContent {
6525 project_id: project_id.to_proto(),
6526 repository_id: repository_id.0,
6527 oid: oid.to_string(),
6528 })
6529 .await?;
6530 Ok(response.content)
6531 }
6532 }
6533 });
6534 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6535 }
6536
    /// Queues a status refresh for `paths`, coalescing with any refresh
    /// already pending (all sends share `GitJobKey::RefreshStatuses`, so a
    /// queued duplicate is skipped and one job drains all accumulated paths).
    /// When `updates_tx` is provided, the resulting snapshot is forwarded
    /// downstream.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take a consistent snapshot plus every batch of paths that
                // accumulated since the last refresh ran.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten and de-duplicate the accumulated path batches.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        // With no HEAD commit there is nothing to diff against,
                        // so substitute an empty diff-stat result.
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Insert an edit for each reported path whose status or
                        // diff stat differs from the previous snapshot. Both the
                        // status entries and the cursor advance in path order, so
                        // one forward seek per entry suffices.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Any path left over produced no status entry, meaning it
                        // is now clean: remove it from the tree if it was present.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                // Apply the computed edits on the foreground thread and emit
                // change events only when something actually changed.
                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6649
6650 /// currently running git command and when it started
6651 pub fn current_job(&self) -> Option<JobInfo> {
6652 self.active_jobs.values().next().cloned()
6653 }
6654
    /// Enqueues a no-op job and returns a receiver that fires once the job
    /// queue has run it — a fence over previously sent jobs.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
6658
    /// Runs `f` while tracking a pending git operation (`git_status`) for each
    /// of `paths`, so in-flight work is visible to observers of `pending_ops`.
    ///
    /// Every path gets a fresh `PendingOp` in the `Running` state before `f`
    /// starts; once `f` resolves, each op is marked `Finished`, `Skipped`
    /// (when the underlying job was canceled), or `Error`.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                // A `Canceled` oneshot means the job was dropped rather than
                // failed: mark the ops skipped and report success.
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    // Re-read each path's op list, since it may have been
                    // modified concurrently while the job was running.
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6698
6699 fn new_pending_ops_for_paths(
6700 &mut self,
6701 paths: Vec<RepoPath>,
6702 git_status: pending_op::GitStatus,
6703 ) -> Vec<(PendingOpId, RepoPath)> {
6704 let mut edits = Vec::with_capacity(paths.len());
6705 let mut ids = Vec::with_capacity(paths.len());
6706 for path in paths {
6707 let mut ops = self
6708 .pending_ops
6709 .get(&PathKey(path.as_ref().clone()), ())
6710 .cloned()
6711 .unwrap_or_else(|| PendingOps::new(&path));
6712 let id = ops.max_id() + 1;
6713 ops.ops.push(PendingOp {
6714 id,
6715 git_status,
6716 job_status: pending_op::JobStatus::Running,
6717 });
6718 edits.push(sum_tree::Edit::Insert(ops));
6719 ids.push((id, path));
6720 }
6721 self.pending_ops.edit(edits, ());
6722 ids
6723 }
6724 pub fn default_remote_url(&self) -> Option<String> {
6725 self.remote_upstream_url
6726 .clone()
6727 .or(self.remote_origin_url.clone())
6728 }
6729}
6730
6731/// If `path` is a git linked worktree checkout, resolves it to the main
6732/// repository's working directory path. Returns `None` if `path` is a normal
6733/// repository, not a git repo, or if resolution fails.
6734///
6735/// Resolution works by:
6736/// 1. Reading the `.git` file to get the `gitdir:` pointer
6737/// 2. Following that to the worktree-specific git directory
6738/// 3. Reading the `commondir` file to find the shared `.git` directory
6739/// 4. Deriving the main repo's working directory from the common dir
6740pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
6741 let dot_git = path.join(".git");
6742 let metadata = fs.metadata(&dot_git).await.ok()??;
6743 if metadata.is_dir {
6744 return None; // Normal repo, not a linked worktree
6745 }
6746 // It's a .git file — parse the gitdir: pointer
6747 let content = fs.load(&dot_git).await.ok()?;
6748 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
6749 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
6750 // Read commondir to find the main .git directory
6751 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
6752 let common_dir = fs
6753 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
6754 .await
6755 .ok()?;
6756 Some(git::repository::original_repo_path_from_common_dir(
6757 &common_dir,
6758 ))
6759}
6760
6761/// Validates that the resolved worktree directory is acceptable:
6762/// - The setting must not be an absolute path.
6763/// - The resolved path must be either a subdirectory of the working
6764/// directory or a subdirectory of its parent (i.e., a sibling).
6765///
6766/// Returns `Ok(resolved_path)` or an error with a user-facing message.
6767pub fn worktrees_directory_for_repo(
6768 original_repo_abs_path: &Path,
6769 worktree_directory_setting: &str,
6770) -> Result<PathBuf> {
6771 // Check the original setting before trimming, since a path like "///"
6772 // is absolute but becomes "" after stripping trailing separators.
6773 // Also check for leading `/` or `\` explicitly, because on Windows
6774 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
6775 // would slip through even though it's clearly not a relative path.
6776 if Path::new(worktree_directory_setting).is_absolute()
6777 || worktree_directory_setting.starts_with('/')
6778 || worktree_directory_setting.starts_with('\\')
6779 {
6780 anyhow::bail!(
6781 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
6782 );
6783 }
6784
6785 if worktree_directory_setting.is_empty() {
6786 anyhow::bail!("git.worktree_directory must not be empty");
6787 }
6788
6789 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
6790 if trimmed == ".." {
6791 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
6792 }
6793
6794 let joined = original_repo_abs_path.join(trimmed);
6795 let resolved = util::normalize_path(&joined);
6796 let resolved = if resolved.starts_with(original_repo_abs_path) {
6797 resolved
6798 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
6799 resolved.join(repo_dir_name)
6800 } else {
6801 resolved
6802 };
6803
6804 let parent = original_repo_abs_path
6805 .parent()
6806 .unwrap_or(original_repo_abs_path);
6807
6808 if !resolved.starts_with(parent) {
6809 anyhow::bail!(
6810 "git.worktree_directory resolved to {resolved:?}, which is outside \
6811 the project root and its parent directory. It must resolve to a \
6812 subdirectory of {original_repo_abs_path:?} or a sibling of it."
6813 );
6814 }
6815
6816 Ok(resolved)
6817}
6818
6819/// Returns a short name for a linked worktree suitable for UI display
6820///
6821/// Uses the main worktree path to come up with a short name that disambiguates
6822/// the linked worktree from the main worktree.
6823pub fn linked_worktree_short_name(
6824 main_worktree_path: &Path,
6825 linked_worktree_path: &Path,
6826) -> Option<SharedString> {
6827 if main_worktree_path == linked_worktree_path {
6828 return None;
6829 }
6830
6831 let project_name = main_worktree_path.file_name()?.to_str()?;
6832 let directory_name = linked_worktree_path.file_name()?.to_str()?;
6833 let name = if directory_name != project_name {
6834 directory_name.to_string()
6835 } else {
6836 linked_worktree_path
6837 .parent()?
6838 .file_name()?
6839 .to_str()?
6840 .to_string()
6841 };
6842 Some(name.into())
6843}
6844
/// Builds a permalink to the upstream source of a file inside a Cargo
/// registry checkout (e.g. under `~/.cargo/registry/src/`).
///
/// Published crates carry a `.cargo_vcs_info.json` recording the git SHA they
/// were packaged from; combined with `package.repository` from `Cargo.toml`,
/// that suffices to build a permalink for `path` at the `selection` line range.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal deserialization views of the metadata files — only the fields
    // needed here.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to find the crate root, identified by the
    // presence of `.cargo_vcs_info.json`.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file path at the crate's location within the upstream repo.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
6895
6896fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6897 let Some(blame) = blame else {
6898 return proto::BlameBufferResponse {
6899 blame_response: None,
6900 };
6901 };
6902
6903 let entries = blame
6904 .entries
6905 .into_iter()
6906 .map(|entry| proto::BlameEntry {
6907 sha: entry.sha.as_bytes().into(),
6908 start_line: entry.range.start,
6909 end_line: entry.range.end,
6910 original_line_number: entry.original_line_number,
6911 author: entry.author,
6912 author_mail: entry.author_mail,
6913 author_time: entry.author_time,
6914 author_tz: entry.author_tz,
6915 committer: entry.committer_name,
6916 committer_mail: entry.committer_email,
6917 committer_time: entry.committer_time,
6918 committer_tz: entry.committer_tz,
6919 summary: entry.summary,
6920 previous: entry.previous,
6921 filename: entry.filename,
6922 })
6923 .collect::<Vec<_>>();
6924
6925 let messages = blame
6926 .messages
6927 .into_iter()
6928 .map(|(oid, message)| proto::CommitMessage {
6929 oid: oid.as_bytes().into(),
6930 message,
6931 })
6932 .collect::<Vec<_>>();
6933
6934 proto::BlameBufferResponse {
6935 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6936 }
6937}
6938
6939fn deserialize_blame_buffer_response(
6940 response: proto::BlameBufferResponse,
6941) -> Option<git::blame::Blame> {
6942 let response = response.blame_response?;
6943 let entries = response
6944 .entries
6945 .into_iter()
6946 .filter_map(|entry| {
6947 Some(git::blame::BlameEntry {
6948 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6949 range: entry.start_line..entry.end_line,
6950 original_line_number: entry.original_line_number,
6951 committer_name: entry.committer,
6952 committer_time: entry.committer_time,
6953 committer_tz: entry.committer_tz,
6954 committer_email: entry.committer_mail,
6955 author: entry.author,
6956 author_mail: entry.author_mail,
6957 author_time: entry.author_time,
6958 author_tz: entry.author_tz,
6959 summary: entry.summary,
6960 previous: entry.previous,
6961 filename: entry.filename,
6962 })
6963 })
6964 .collect::<Vec<_>>();
6965
6966 let messages = response
6967 .messages
6968 .into_iter()
6969 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6970 .collect::<HashMap<_, _>>();
6971
6972 Some(Blame { entries, messages })
6973}
6974
6975fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6976 proto::Branch {
6977 is_head: branch.is_head,
6978 ref_name: branch.ref_name.to_string(),
6979 unix_timestamp: branch
6980 .most_recent_commit
6981 .as_ref()
6982 .map(|commit| commit.commit_timestamp as u64),
6983 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6984 ref_name: upstream.ref_name.to_string(),
6985 tracking: upstream
6986 .tracking
6987 .status()
6988 .map(|upstream| proto::UpstreamTracking {
6989 ahead: upstream.ahead as u64,
6990 behind: upstream.behind as u64,
6991 }),
6992 }),
6993 most_recent_commit: branch
6994 .most_recent_commit
6995 .as_ref()
6996 .map(|commit| proto::CommitSummary {
6997 sha: commit.sha.to_string(),
6998 subject: commit.subject.to_string(),
6999 commit_timestamp: commit.commit_timestamp,
7000 author_name: commit.author_name.to_string(),
7001 }),
7002 }
7003}
7004
7005fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7006 proto::Worktree {
7007 path: worktree.path.to_string_lossy().to_string(),
7008 ref_name: worktree
7009 .ref_name
7010 .as_ref()
7011 .map(|s| s.to_string())
7012 .unwrap_or_default(),
7013 sha: worktree.sha.to_string(),
7014 }
7015}
7016
7017fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7018 git::repository::Worktree {
7019 path: PathBuf::from(proto.path.clone()),
7020 ref_name: Some(SharedString::from(&proto.ref_name)),
7021 sha: proto.sha.clone().into(),
7022 }
7023}
7024
/// Reconstructs a `Branch` from its wire representation.
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    // A missing tracking status was serialized from a `Gone`
                    // upstream (see `branch_to_proto`), so map it back.
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                // NOTE(review): the proto carries no parent information, so
                // `has_parent` is assumed true — confirm callers tolerate this
                // for root commits.
                has_parent: true,
            }
        }),
    }
}
7056
7057fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7058 proto::GitCommitDetails {
7059 sha: commit.sha.to_string(),
7060 message: commit.message.to_string(),
7061 commit_timestamp: commit.commit_timestamp,
7062 author_email: commit.author_email.to_string(),
7063 author_name: commit.author_name.to_string(),
7064 }
7065}
7066
7067fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7068 CommitDetails {
7069 sha: proto.sha.clone().into(),
7070 message: proto.message.clone().into(),
7071 commit_timestamp: proto.commit_timestamp,
7072 author_email: proto.author_email.clone().into(),
7073 author_name: proto.author_name.clone().into(),
7074 }
7075}
7076
/// Recomputes the full repository snapshot, publishing UI events in stages.
///
/// Branch, head commit, remotes, and linked worktrees are computed and applied
/// first so the UI can react sooner; file statuses, diff stats, stash entries,
/// and merge/conflict state follow in a second pass. Git commands run on the
/// background executor while entity state is updated on the foreground thread.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Any queued path-level refreshes are superseded by this full rescan.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve the current HEAD commit, if any; a failure to show the commit
    // is logged and treated as "no head commit".
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // The repository's own checkout is excluded: the snapshot tracks only
    // *linked* worktrees.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First foreground update: apply branch/head/remote/worktree state and
    // notify listeners before the slower status scan below.
    let snapshot = this.update(cx, |this, cx| {
        let branch_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if branch_changed {
            cx.emit(RepositoryEvent::BranchChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Second pass: gather statuses, diff stats, and stash entries in the
    // background. Without a HEAD commit there is nothing to diff against, so
    // an empty diff-stat result is substituted.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Build the status tree, collecting conflicted paths along the way for
    // the merge-state update below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Final foreground update: apply file-level state and emit change events.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7239
/// Decodes a `FileStatus` from the wire format.
///
/// When the structured `status` variant is absent, the flat `simple_status`
/// code is interpreted instead (presumably sent by older peers — TODO
/// confirm); only a subset of statuses is representable that way.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        // Fallback path: map the flat code onto an equivalent structured
        // status.
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both heads with the same fallible mapping, then surface
            // the first error (if any) via `?`.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Same pattern as above: decode index and worktree codes in one
            // pass, then propagate any error.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7325
7326fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7327 use proto::git_file_status::{Tracked, Unmerged, Variant};
7328
7329 let variant = match status {
7330 FileStatus::Untracked => Variant::Untracked(Default::default()),
7331 FileStatus::Ignored => Variant::Ignored(Default::default()),
7332 FileStatus::Unmerged(UnmergedStatus {
7333 first_head,
7334 second_head,
7335 }) => Variant::Unmerged(Unmerged {
7336 first_head: unmerged_status_to_proto(first_head),
7337 second_head: unmerged_status_to_proto(second_head),
7338 }),
7339 FileStatus::Tracked(TrackedStatus {
7340 index_status,
7341 worktree_status,
7342 }) => Variant::Tracked(Tracked {
7343 index_status: tracked_status_to_proto(index_status),
7344 worktree_status: tracked_status_to_proto(worktree_status),
7345 }),
7346 };
7347 proto::GitFileStatus {
7348 variant: Some(variant),
7349 }
7350}
7351
7352fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7353 match code {
7354 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7355 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7356 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7357 }
7358}
7359
7360fn tracked_status_to_proto(code: StatusCode) -> i32 {
7361 match code {
7362 StatusCode::Added => proto::GitStatus::Added as _,
7363 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7364 StatusCode::Modified => proto::GitStatus::Modified as _,
7365 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7366 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7367 StatusCode::Copied => proto::GitStatus::Copied as _,
7368 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7369 }
7370}