1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for git state across all repositories in a project.
///
/// Owns per-repository entities, per-buffer diff/conflict state, and the
/// plumbing for mirroring repository state to downstream collaborators.
pub struct GitStore {
    // Local vs. remote mode, plus any downstream (shared) connection state.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Worktree ids associated with each repository — presumably the
    // worktrees that contain paths from it; TODO confirm against the
    // code that populates this map.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    active_repo_id: Option<RepositoryId>,
    // In-flight diff loads, keyed by (buffer, kind) so that concurrent
    // requests for the same diff share a single task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diffs that have been shared with remote collaborators, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Diffs for a single buffer that have been shared with a remote peer.
#[derive(Default)]
struct SharedDiffs {
    // Working copy vs. index.
    unstaged: Option<Entity<BufferDiff>>,
    // Working copy vs. HEAD.
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: live diffs, conflict tracking, and cached base
/// texts for a single open buffer.
struct BufferGitState {
    // Weak handles so that dropped diffs/conflict sets are released
    // rather than kept alive by this state.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against arbitrary commits, keyed by OID (`None` = no base).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders presumably completed when a conflict-marker reparse
    // finishes — see `reparse_conflict_markers`; TODO confirm.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Watch channel broadcasting whether a diff recalculation is in flight.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    // Cached base texts used for diffing, plus dirty flags recording what
    // changed since the last recalculation.
    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed, and the
/// new contents to apply.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}

/// Identifies which kind of diff is being requested or loaded for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Working copy vs. index.
    Unstaged,
    /// Working copy vs. HEAD.
    Uncommitted,
    /// Working copy vs. an arbitrary commit; a `None` OID yields an
    /// absent base text (see `open_diff_since`).
    SinceOid(Option<git::Oid>),
}
162
/// Backing mode for a [`GitStore`]: operating on local repositories, or
/// proxying requests to an upstream (remote) project.
enum GitStoreState {
    Local {
        // Source of unique ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        // Present while this local project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        // Present while this remote project is re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// Update messages relayed to downstream collaborators by the background
/// task spawned in [`GitStore::shared`].
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Connection state held while a local project is shared downstream.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    // Feeds repository updates to `_task`, which diffs snapshots and
    // forwards the resulting proto updates to the client.
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// Checkpoint state for every repository in the store, keyed by each
/// repository's working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}

/// A single file's git status, together with optional added/deleted
/// line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summarizes this entry for the status sum-tree: the path itself plus
    /// the aggregated git status.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed (and therefore ordered) by repo path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Unique identifier for a repository within a project.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// Details of an in-progress merge, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    // For each conflicted path, the merge heads involved. Assumption:
    // `None` entries are heads without a resolvable name — TODO confirm
    // against the code that builds this map.
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Commit data for the git graph, which may still be loading.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// An immutable view of a repository's observable state: file statuses,
/// branch/head, merge and stash details. Snapshots are cloned and diffed
/// to produce downstream updates (see `GitStore::shared`).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    // Linked git worktrees of this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
298
/// Identifier for a queued or running git job.
type JobId = u64;

/// Metadata about a running git job (e.g. for progress display).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Background handler that serves commit-data requests for the git graph.
struct GraphCommitDataHandler {
    _task: Task<()>,
    // Requests data for a specific commit by OID.
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph commit-data handler.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Initial commit data fetched for the git graph, per log source/order.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    // Maps a commit OID to its index within `commit_data`.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of graph commit data returned to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// A single git repository tracked by the [`GitStore`], combining the
/// latest [`RepositorySnapshot`] with job scheduling and graph/commit
/// caches. Derefs to its snapshot for read access.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    // Buffer used for the commit message, if one has been opened.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Queue of git jobs to run against this repository's backend.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    job_id: JobId,
    // Askpass delegates registered for in-flight remote operations,
    // keyed by askpass id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Shared task resolving to the backend (local or remote proxy).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    // Git-graph data cached per (log source, log order) combination.
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
350
/// Gives read access to snapshot fields directly on `Repository`
/// (e.g. `repo.statuses_by_path`).
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
358
/// Backend state for a repository on the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    // The opened git backend (see `LocalRepositoryState::new`).
    pub backend: Arc<dyn GitRepository>,
    // Shell environment resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
365
impl LocalRepositoryState {
    /// Opens the git backend for a repository on the local filesystem.
    ///
    /// Resolves the working directory's shell environment (falling back to
    /// an empty environment on failure), locates a system `git` binary,
    /// opens the repository at `dot_git_abs_path`, and applies the trust
    /// flag to the backend.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        // Prefer a `git` found on the resolved environment's PATH, falling
        // back to whatever is on the process's default search path.
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
408
/// Handle for proxying git operations to a remote (collab) host.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// Where git operations for a repository are executed.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events while loading the git graph.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of known commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
427
/// Events emitted by a [`Repository`] when its observable state changes.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    // Graph loading progress for a particular (source, order) view.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
440
/// Project-level git events emitted by the [`GitStore`].
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// A hunk stage/unstage write to the index failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
452
// Event wiring: repositories emit state-change and job events; the store
// emits project-level git events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
456
/// A unit of work to run against a repository's backend.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // When present, presumably used to deduplicate or serialize
    // equivalent jobs — TODO confirm against the job-queue consumer.
    key: Option<GitJobKey>,
}

/// Identifies job kinds that should not be run redundantly.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
471 pub fn local(
472 worktree_store: &Entity<WorktreeStore>,
473 buffer_store: Entity<BufferStore>,
474 environment: Entity<ProjectEnvironment>,
475 fs: Arc<dyn Fs>,
476 cx: &mut Context<Self>,
477 ) -> Self {
478 Self::new(
479 worktree_store.clone(),
480 buffer_store,
481 GitStoreState::Local {
482 next_repository_id: Arc::new(AtomicU64::new(1)),
483 downstream: None,
484 project_environment: environment,
485 fs,
486 },
487 cx,
488 )
489 }
490
491 pub fn remote(
492 worktree_store: &Entity<WorktreeStore>,
493 buffer_store: Entity<BufferStore>,
494 upstream_client: AnyProtoClient,
495 project_id: u64,
496 cx: &mut Context<Self>,
497 ) -> Self {
498 Self::new(
499 worktree_store.clone(),
500 buffer_store,
501 GitStoreState::Remote {
502 upstream_client,
503 upstream_project_id: project_id,
504 downstream: None,
505 },
506 cx,
507 )
508 }
509
510 fn new(
511 worktree_store: Entity<WorktreeStore>,
512 buffer_store: Entity<BufferStore>,
513 state: GitStoreState,
514 cx: &mut Context<Self>,
515 ) -> Self {
516 let mut _subscriptions = vec![
517 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
518 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
519 ];
520
521 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
522 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
523 }
524
525 GitStore {
526 state,
527 buffer_store,
528 worktree_store,
529 repositories: HashMap::default(),
530 worktree_ids: HashMap::default(),
531 active_repo_id: None,
532 _subscriptions,
533 loading_diffs: HashMap::default(),
534 shared_diffs: HashMap::default(),
535 diffs: HashMap::default(),
536 }
537 }
538
    /// Registers all git-related RPC message handlers on `client`.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
586
587 pub fn is_local(&self) -> bool {
588 matches!(self.state, GitStoreState::Local { .. })
589 }
590 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
591 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
592 let id = repo.read(cx).id;
593 if self.active_repo_id != Some(id) {
594 self.active_repo_id = Some(id);
595 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
596 }
597 }
598 }
599
    /// Begins sharing this project's git state with downstream
    /// collaborators identified by `project_id` via `client`.
    ///
    /// For a remote store, sends each repository's initial snapshot and
    /// records the downstream client. For a local store, spawns a
    /// background task that diffs successive snapshots and relays the
    /// resulting proto updates until the channel closes.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send the full initial state for every repository.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the relay with the current snapshot of every repo.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        // Known repo: send only the delta
                                        // against the last snapshot sent.
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repo: send the full state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // Relay ended (channel closed or send failed):
                        // clear the downstream connection.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
680
681 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
682 match &mut self.state {
683 GitStoreState::Local {
684 downstream: downstream_client,
685 ..
686 } => {
687 downstream_client.take();
688 }
689 GitStoreState::Remote {
690 downstream: downstream_client,
691 ..
692 } => {
693 downstream_client.take();
694 }
695 }
696 self.shared_diffs.clear();
697 }
698
    /// Drops all diffs that were shared with `peer_id` (e.g. after the
    /// peer disconnects).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
702
703 pub fn active_repository(&self) -> Option<Entity<Repository>> {
704 self.active_repo_id
705 .as_ref()
706 .map(|id| self.repositories[id].clone())
707 }
708
    /// Returns the unstaged diff (working copy vs. index) for `buffer`,
    /// reusing a live diff when one exists and deduplicating concurrent
    /// loads through `loading_diffs`.
    ///
    /// Errors if the buffer does not belong to any known repository.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: a live unstaged diff already exists for this buffer.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // If a recalculation is in flight, resolve once it completes.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Share a single loading task per (buffer, kind) so concurrent
        // callers don't load the index text twice.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
763
    /// Returns a diff of `buffer` against the blob it had at commit `oid`
    /// (or against no base when `oid` is `None`), reusing any live diff
    /// and deduplicating concurrent loads.
    ///
    /// The created diff uses the buffer's unstaged diff as its secondary
    /// diff.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff for this OID already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            // If a recalculation is in flight, resolve once it completes.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Join an in-flight load for the same (buffer, oid), if any.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository; `None` OID
                    // means there is no base (e.g. file absent at that
                    // point).
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Use the unstaged diff as the secondary diff, loading
                    // it if necessary.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Register the finished diff and cache its base text.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
864
    /// Returns the uncommitted diff (working copy vs. HEAD) for `buffer`,
    /// reusing a live diff when one exists and deduplicating concurrent
    /// loads through `loading_diffs`.
    ///
    /// Errors if the buffer does not belong to any known repository.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live uncommitted diff already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // If a recalculation is in flight, resolve once it completes.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Share a single loading task per (buffer, kind).
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
917
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`:
    /// registers a new [`BufferDiff`] for the buffer, wires up its
    /// secondary diff (for uncommitted diffs), applies the loaded base
    /// texts, and waits for the first recalculation to finish.
    ///
    /// On error, the pending entry in `loading_diffs` is removed so a
    /// later call can retry. Not used for `DiffKind::SinceOid`.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the dedup entry so the failure isn't cached.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff needs the unstaged diff as
                        // its secondary; create one if none is live.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Apply the new base texts and wait for the initial
                // recalculation before handing the diff to the caller.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
992
993 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
994 let diff_state = self.diffs.get(&buffer_id)?;
995 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
996 }
997
998 pub fn get_uncommitted_diff(
999 &self,
1000 buffer_id: BufferId,
1001 cx: &App,
1002 ) -> Option<Entity<BufferDiff>> {
1003 let diff_state = self.diffs.get(&buffer_id)?;
1004 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1005 }
1006
1007 pub fn get_diff_since_oid(
1008 &self,
1009 buffer_id: BufferId,
1010 oid: Option<git::Oid>,
1011 cx: &App,
1012 ) -> Option<Entity<BufferDiff>> {
1013 let diff_state = self.diffs.get(&buffer_id)?;
1014 diff_state.read(cx).oid_diff(oid)
1015 }
1016
1017 pub fn open_conflict_set(
1018 &mut self,
1019 buffer: Entity<Buffer>,
1020 cx: &mut Context<Self>,
1021 ) -> Entity<ConflictSet> {
1022 log::debug!("open conflict set");
1023 let buffer_id = buffer.read(cx).remote_id();
1024
1025 if let Some(git_state) = self.diffs.get(&buffer_id)
1026 && let Some(conflict_set) = git_state
1027 .read(cx)
1028 .conflict_set
1029 .as_ref()
1030 .and_then(|weak| weak.upgrade())
1031 {
1032 let conflict_set = conflict_set;
1033 let buffer_snapshot = buffer.read(cx).text_snapshot();
1034
1035 git_state.update(cx, |state, cx| {
1036 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1037 });
1038
1039 return conflict_set;
1040 }
1041
1042 let is_unmerged = self
1043 .repository_and_path_for_buffer_id(buffer_id, cx)
1044 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1045 let git_store = cx.weak_entity();
1046 let buffer_git_state = self
1047 .diffs
1048 .entry(buffer_id)
1049 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1050 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1051
1052 self._subscriptions
1053 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1054 cx.emit(GitStoreEvent::ConflictsUpdated);
1055 }));
1056
1057 buffer_git_state.update(cx, |state, cx| {
1058 state.conflict_set = Some(conflict_set.downgrade());
1059 let buffer_snapshot = buffer.read(cx).text_snapshot();
1060 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1061 });
1062
1063 conflict_set
1064 }
1065
1066 pub fn project_path_git_status(
1067 &self,
1068 project_path: &ProjectPath,
1069 cx: &App,
1070 ) -> Option<FileStatus> {
1071 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1072 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1073 }
1074
    /// Takes a checkpoint of every repository in the store, keyed by each
    /// repository's working-directory absolute path. Checkpoints are gathered
    /// concurrently on the background executor; the returned task fails if
    /// any individual repository checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` yields a nested Result; `?` inside the map
                // flattens it so `try_join_all` below sees a single error type.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1095
    /// Restores each repository to the state captured in `checkpoint`.
    ///
    /// Checkpoint entries are matched to live repositories by
    /// working-directory absolute path; entries with no matching repository
    /// are silently skipped. All restores run concurrently, and the returned
    /// task fails if any of them fail.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // The inner `?` flattens the nested Result from the restore job.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1121
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Checkpoints are compared per working directory. If `left` contains a
    /// working directory that `right` lacks, `false` is returned immediately;
    /// otherwise all pairwise comparisons must report equality. Entries with
    /// no matching live repository are skipped. Note the asymmetry: entries
    /// present only in `right` are ignored.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    // The inner `?` flattens the nested Result from the job.
                    tasks.push(async move { compare.await? });
                }
            } else {
                // `left` has a working directory that `right` lacks.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1160
    /// Blames a buffer.
    ///
    /// Uses the buffer's content at `version` (or its current content when
    /// `version` is `None`). For a local repository the blame runs through the
    /// git backend; for a remote one a `BlameBuffer` request is forwarded
    /// upstream. Fails if the buffer belongs to no known repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle across the await so the repository entity
        // can be dropped while the blame is in flight.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1207
1208 pub fn file_history(
1209 &self,
1210 repo: &Entity<Repository>,
1211 path: RepoPath,
1212 cx: &mut App,
1213 ) -> Task<Result<git::repository::FileHistory>> {
1214 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1215
1216 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1217 }
1218
1219 pub fn file_history_paginated(
1220 &self,
1221 repo: &Entity<Repository>,
1222 path: RepoPath,
1223 skip: usize,
1224 limit: Option<usize>,
1225 cx: &mut App,
1226 ) -> Task<Result<git::repository::FileHistory>> {
1227 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1228
1229 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1230 }
1231
    /// Builds a permalink URL to the selected lines of `buffer` on its git
    /// hosting provider.
    ///
    /// For a local repository this resolves the remote URL and HEAD SHA via
    /// the backend; for a remote repository the request is forwarded upstream.
    /// When the buffer is not in a repository at all, a special case attempts
    /// a permalink for Rust sources in the Cargo registry.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote, falling back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1316
1317 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1318 match &self.state {
1319 GitStoreState::Local {
1320 downstream: downstream_client,
1321 ..
1322 } => downstream_client
1323 .as_ref()
1324 .map(|state| (state.client.clone(), state.project_id)),
1325 GitStoreState::Remote {
1326 downstream: downstream_client,
1327 ..
1328 } => downstream_client.clone(),
1329 }
1330 }
1331
1332 fn upstream_client(&self) -> Option<AnyProtoClient> {
1333 match &self.state {
1334 GitStoreState::Local { .. } => None,
1335 GitStoreState::Remote {
1336 upstream_client, ..
1337 } => Some(upstream_client.clone()),
1338 }
1339 }
1340
    /// Routes worktree-store events into the git store.
    ///
    /// Only relevant for a local store — a remote replica learns about
    /// repositories through proto updates instead, so this bails early for
    /// `GitStoreState::Remote`. Handles: entry updates (paths changed inside
    /// repositories), changes to the set of git repositories in a worktree,
    /// and worktree removal (dropping repositories that no longer belong to
    /// any worktree).
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Resolve which repositories the changed paths belong to,
                    // then notify each affected repository.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // A repository survives as long as at least one worktree still
                // references it; collect those that just lost their last one.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                if is_active_repo_removed {
                    // Fall back to any remaining repository, or to none.
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to a repository event by refreshing conflict tracking for every
    /// open buffer that belongs to that repository, then re-emits the event at
    /// the store level, flagging whether it came from the active repository.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // `conflict_set` is a weak handle; `update` fails if it
                        // was dropped, and `?` then skips just this buffer.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse markers when the unmerged state flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1486
1487 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1488 cx.emit(GitStoreEvent::JobsUpdated)
1489 }
1490
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
    ///
    /// Three cases per update entry:
    /// - the repository is already known (by old or new work-dir path) and
    ///   still has a work dir: record the worktree association, adopt the new
    ///   path, and rescan;
    /// - the repository is known but its work dir went away: detach it from
    ///   this worktree and drop it once no worktree references it;
    /// - otherwise, a brand-new repository: create a local `Repository`
    ///   entity, subscribe to it, and make it active if nothing else is.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match an existing repository by either its old or new work dir.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // The work directory is gone; drop the repository when no
                    // worktree references it anymore.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(_repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                let original_repo_abs_path: Arc<Path> =
                    git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // New repositories inherit the trust state of their worktree.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1599
1600 fn on_trusted_worktrees_event(
1601 &mut self,
1602 _: Entity<TrustedWorktreesStore>,
1603 event: &TrustedWorktreesEvent,
1604 cx: &mut Context<Self>,
1605 ) {
1606 if !matches!(self.state, GitStoreState::Local { .. }) {
1607 return;
1608 }
1609
1610 let (is_trusted, event_paths) = match event {
1611 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1612 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1613 };
1614
1615 for (repo_id, worktree_ids) in &self.worktree_ids {
1616 if worktree_ids
1617 .iter()
1618 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1619 {
1620 if let Some(repo) = self.repositories.get(repo_id) {
1621 let repository_state = repo.read(cx).repository_state.clone();
1622 cx.background_spawn(async move {
1623 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1624 state.backend.set_trusted(is_trusted);
1625 }
1626 })
1627 .detach();
1628 }
1629 }
1630 }
1631 }
1632
    /// Keeps per-buffer git state in sync with buffer-store lifecycle events:
    /// language changes, shared-buffer closes, buffer drops, and file renames.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Re-highlighting of diffs depends on the buffer's language,
                // so forward language changes to the buffer's diff state.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop all diff state, including diffs shared with peers.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Inner async block so a failure anywhere is logged
                        // once via `log_err` rather than propagated.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1705
    /// Schedules diff recalculation and conflict-marker reparsing for each of
    /// the given buffers that has git state, returning a future that resolves
    /// once all the scheduled work has completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` is `None` when nothing was
                    // scheduled; `extend` skips that case.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1731
    /// When hunks are staged or unstaged through a diff, writes the new index
    /// text back to the repository; on failure, clears the diff's pending
    /// hunks and emits `IndexWriteError`.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Monotonic per-buffer counter passed along with the index
                // write — presumably used to detect superseded writes; see
                // `spawn_set_index_text_job`.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Outer Ok means the job ran; inner Err is the actual
                        // index-write failure we report.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1771
1772 fn local_worktree_git_repos_changed(
1773 &mut self,
1774 worktree: Entity<Worktree>,
1775 changed_repos: &UpdatedGitRepositoriesSet,
1776 cx: &mut Context<Self>,
1777 ) {
1778 log::debug!("local worktree repos changed");
1779 debug_assert!(worktree.read(cx).is_local());
1780
1781 for repository in self.repositories.values() {
1782 repository.update(cx, |repository, cx| {
1783 let repo_abs_path = &repository.work_directory_abs_path;
1784 if changed_repos.iter().any(|update| {
1785 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1786 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1787 }) {
1788 repository.reload_buffer_diff_bases(cx);
1789 }
1790 });
1791 }
1792 }
1793
    /// All repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1797
1798 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1799 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1800 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1801 Some(status.status)
1802 }
1803
1804 pub fn repository_and_path_for_buffer_id(
1805 &self,
1806 buffer_id: BufferId,
1807 cx: &App,
1808 ) -> Option<(Entity<Repository>, RepoPath)> {
1809 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1810 let project_path = buffer.read(cx).project_path(cx)?;
1811 self.repository_and_path_for_project_path(&project_path, cx)
1812 }
1813
1814 pub fn repository_and_path_for_project_path(
1815 &self,
1816 path: &ProjectPath,
1817 cx: &App,
1818 ) -> Option<(Entity<Repository>, RepoPath)> {
1819 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1820 self.repositories
1821 .values()
1822 .filter_map(|repo| {
1823 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1824 Some((repo.clone(), repo_path))
1825 })
1826 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1827 }
1828
1829 pub fn git_init(
1830 &self,
1831 path: Arc<Path>,
1832 fallback_branch_name: String,
1833 cx: &App,
1834 ) -> Task<Result<()>> {
1835 match &self.state {
1836 GitStoreState::Local { fs, .. } => {
1837 let fs = fs.clone();
1838 cx.background_executor()
1839 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1840 }
1841 GitStoreState::Remote {
1842 upstream_client,
1843 upstream_project_id: project_id,
1844 ..
1845 } => {
1846 let client = upstream_client.clone();
1847 let project_id = *project_id;
1848 cx.background_executor().spawn(async move {
1849 client
1850 .request(proto::GitInit {
1851 project_id: project_id,
1852 abs_path: path.to_string_lossy().into_owned(),
1853 fallback_branch_name,
1854 })
1855 .await?;
1856 Ok(())
1857 })
1858 }
1859 }
1860 }
1861
1862 pub fn git_clone(
1863 &self,
1864 repo: String,
1865 path: impl Into<Arc<std::path::Path>>,
1866 cx: &App,
1867 ) -> Task<Result<()>> {
1868 let path = path.into();
1869 match &self.state {
1870 GitStoreState::Local { fs, .. } => {
1871 let fs = fs.clone();
1872 cx.background_executor()
1873 .spawn(async move { fs.git_clone(&repo, &path).await })
1874 }
1875 GitStoreState::Remote {
1876 upstream_client,
1877 upstream_project_id,
1878 ..
1879 } => {
1880 if upstream_client.is_via_collab() {
1881 return Task::ready(Err(anyhow!(
1882 "Git Clone isn't supported for project guests"
1883 )));
1884 }
1885 let request = upstream_client.request(proto::GitClone {
1886 project_id: *upstream_project_id,
1887 abs_path: path.to_string_lossy().into_owned(),
1888 remote_repo: repo,
1889 });
1890
1891 cx.background_spawn(async move {
1892 let result = request.await?;
1893
1894 match result.success {
1895 true => Ok(()),
1896 false => Err(anyhow!("Git Clone failed")),
1897 }
1898 })
1899 }
1900 }
1901 }
1902
    /// Handles an `UpdateRepository` message from the upstream host: creates
    /// the repository entity on first sight, applies the snapshot update,
    /// adopts it as the active repository if none is set yet, and re-forwards
    /// the update downstream when this project is itself being shared.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // The subscription is collected here and pushed onto
            // `this._subscriptions` after the entry borrow ends.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward to our own guests, with our project id substituted.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1958
1959 async fn handle_remove_repository(
1960 this: Entity<Self>,
1961 envelope: TypedEnvelope<proto::RemoveRepository>,
1962 mut cx: AsyncApp,
1963 ) -> Result<()> {
1964 this.update(&mut cx, |this, cx| {
1965 let mut update = envelope.payload;
1966 let id = RepositoryId::from_proto(update.id);
1967 this.repositories.remove(&id);
1968 if let Some((client, project_id)) = this.downstream_client() {
1969 update.project_id = project_id.to_proto();
1970 client.send(update).log_err();
1971 }
1972 if this.active_repo_id == Some(id) {
1973 this.active_repo_id = None;
1974 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1975 }
1976 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1977 });
1978 Ok(())
1979 }
1980
    /// Handles a `GitInit` request from a remote peer by initializing a
    /// repository at the requested path on this host.
    async fn handle_git_init(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitInit>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let name = envelope.payload.fallback_branch_name;
        cx.update(|cx| this.read(cx).git_init(path, name, cx))
            .await?;

        Ok(proto::Ack {})
    }
1993
    /// Handles a `GitClone` request from a remote peer. Success is reported
    /// as a boolean in the response rather than as an RPC error.
    async fn handle_git_clone(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitClone>,
        cx: AsyncApp,
    ) -> Result<proto::GitCloneResponse> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let repo_name = envelope.payload.remote_repo;
        let result = cx
            .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
            .await;

        Ok(proto::GitCloneResponse {
            success: result.is_ok(),
        })
    }
2009
    /// Handles a `Fetch` request from a remote peer, relaying askpass prompts
    /// back to the requester and returning git's stdout/stderr.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Credential prompts raised by git are forwarded to the requesting
        // peer via this delegate.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2039
    /// Handles a `Push` request from a remote peer, relaying askpass prompts
    /// back to the requester and returning git's stdout/stderr.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // Only decode push options when the optional field was actually set.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2087
2088 async fn handle_pull(
2089 this: Entity<Self>,
2090 envelope: TypedEnvelope<proto::Pull>,
2091 mut cx: AsyncApp,
2092 ) -> Result<proto::RemoteMessageResponse> {
2093 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2094 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2095 let askpass_id = envelope.payload.askpass_id;
2096 let askpass = make_remote_delegate(
2097 this,
2098 envelope.payload.project_id,
2099 repository_id,
2100 askpass_id,
2101 &mut cx,
2102 );
2103
2104 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2105 let remote_name = envelope.payload.remote_name.into();
2106 let rebase = envelope.payload.rebase;
2107
2108 let remote_message = repository_handle
2109 .update(&mut cx, |repository_handle, cx| {
2110 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2111 })
2112 .await??;
2113
2114 Ok(proto::RemoteMessageResponse {
2115 stdout: remote_message.stdout,
2116 stderr: remote_message.stderr,
2117 })
2118 }
2119
2120 async fn handle_stage(
2121 this: Entity<Self>,
2122 envelope: TypedEnvelope<proto::Stage>,
2123 mut cx: AsyncApp,
2124 ) -> Result<proto::Ack> {
2125 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2126 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2127
2128 let entries = envelope
2129 .payload
2130 .paths
2131 .into_iter()
2132 .map(|path| RepoPath::new(&path))
2133 .collect::<Result<Vec<_>>>()?;
2134
2135 repository_handle
2136 .update(&mut cx, |repository_handle, cx| {
2137 repository_handle.stage_entries(entries, cx)
2138 })
2139 .await?;
2140 Ok(proto::Ack {})
2141 }
2142
2143 async fn handle_unstage(
2144 this: Entity<Self>,
2145 envelope: TypedEnvelope<proto::Unstage>,
2146 mut cx: AsyncApp,
2147 ) -> Result<proto::Ack> {
2148 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2149 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2150
2151 let entries = envelope
2152 .payload
2153 .paths
2154 .into_iter()
2155 .map(|path| RepoPath::new(&path))
2156 .collect::<Result<Vec<_>>>()?;
2157
2158 repository_handle
2159 .update(&mut cx, |repository_handle, cx| {
2160 repository_handle.unstage_entries(entries, cx)
2161 })
2162 .await?;
2163
2164 Ok(proto::Ack {})
2165 }
2166
2167 async fn handle_stash(
2168 this: Entity<Self>,
2169 envelope: TypedEnvelope<proto::Stash>,
2170 mut cx: AsyncApp,
2171 ) -> Result<proto::Ack> {
2172 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2173 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2174
2175 let entries = envelope
2176 .payload
2177 .paths
2178 .into_iter()
2179 .map(|path| RepoPath::new(&path))
2180 .collect::<Result<Vec<_>>>()?;
2181
2182 repository_handle
2183 .update(&mut cx, |repository_handle, cx| {
2184 repository_handle.stash_entries(entries, cx)
2185 })
2186 .await?;
2187
2188 Ok(proto::Ack {})
2189 }
2190
2191 async fn handle_stash_pop(
2192 this: Entity<Self>,
2193 envelope: TypedEnvelope<proto::StashPop>,
2194 mut cx: AsyncApp,
2195 ) -> Result<proto::Ack> {
2196 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2197 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2198 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2199
2200 repository_handle
2201 .update(&mut cx, |repository_handle, cx| {
2202 repository_handle.stash_pop(stash_index, cx)
2203 })
2204 .await?;
2205
2206 Ok(proto::Ack {})
2207 }
2208
2209 async fn handle_stash_apply(
2210 this: Entity<Self>,
2211 envelope: TypedEnvelope<proto::StashApply>,
2212 mut cx: AsyncApp,
2213 ) -> Result<proto::Ack> {
2214 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2215 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2216 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2217
2218 repository_handle
2219 .update(&mut cx, |repository_handle, cx| {
2220 repository_handle.stash_apply(stash_index, cx)
2221 })
2222 .await?;
2223
2224 Ok(proto::Ack {})
2225 }
2226
2227 async fn handle_stash_drop(
2228 this: Entity<Self>,
2229 envelope: TypedEnvelope<proto::StashDrop>,
2230 mut cx: AsyncApp,
2231 ) -> Result<proto::Ack> {
2232 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2233 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2234 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2235
2236 repository_handle
2237 .update(&mut cx, |repository_handle, cx| {
2238 repository_handle.stash_drop(stash_index, cx)
2239 })
2240 .await??;
2241
2242 Ok(proto::Ack {})
2243 }
2244
2245 async fn handle_set_index_text(
2246 this: Entity<Self>,
2247 envelope: TypedEnvelope<proto::SetIndexText>,
2248 mut cx: AsyncApp,
2249 ) -> Result<proto::Ack> {
2250 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2251 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2252 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2253
2254 repository_handle
2255 .update(&mut cx, |repository_handle, cx| {
2256 repository_handle.spawn_set_index_text_job(
2257 repo_path,
2258 envelope.payload.text,
2259 None,
2260 cx,
2261 )
2262 })
2263 .await??;
2264 Ok(proto::Ack {})
2265 }
2266
2267 async fn handle_run_hook(
2268 this: Entity<Self>,
2269 envelope: TypedEnvelope<proto::RunGitHook>,
2270 mut cx: AsyncApp,
2271 ) -> Result<proto::Ack> {
2272 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2273 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2274 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2275 repository_handle
2276 .update(&mut cx, |repository_handle, cx| {
2277 repository_handle.run_hook(hook, cx)
2278 })
2279 .await??;
2280 Ok(proto::Ack {})
2281 }
2282
2283 async fn handle_commit(
2284 this: Entity<Self>,
2285 envelope: TypedEnvelope<proto::Commit>,
2286 mut cx: AsyncApp,
2287 ) -> Result<proto::Ack> {
2288 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2289 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2290 let askpass_id = envelope.payload.askpass_id;
2291
2292 let askpass = make_remote_delegate(
2293 this,
2294 envelope.payload.project_id,
2295 repository_id,
2296 askpass_id,
2297 &mut cx,
2298 );
2299
2300 let message = SharedString::from(envelope.payload.message);
2301 let name = envelope.payload.name.map(SharedString::from);
2302 let email = envelope.payload.email.map(SharedString::from);
2303 let options = envelope.payload.options.unwrap_or_default();
2304
2305 repository_handle
2306 .update(&mut cx, |repository_handle, cx| {
2307 repository_handle.commit(
2308 message,
2309 name.zip(email),
2310 CommitOptions {
2311 amend: options.amend,
2312 signoff: options.signoff,
2313 },
2314 askpass,
2315 cx,
2316 )
2317 })
2318 .await??;
2319 Ok(proto::Ack {})
2320 }
2321
2322 async fn handle_get_remotes(
2323 this: Entity<Self>,
2324 envelope: TypedEnvelope<proto::GetRemotes>,
2325 mut cx: AsyncApp,
2326 ) -> Result<proto::GetRemotesResponse> {
2327 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2328 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2329
2330 let branch_name = envelope.payload.branch_name;
2331 let is_push = envelope.payload.is_push;
2332
2333 let remotes = repository_handle
2334 .update(&mut cx, |repository_handle, _| {
2335 repository_handle.get_remotes(branch_name, is_push)
2336 })
2337 .await??;
2338
2339 Ok(proto::GetRemotesResponse {
2340 remotes: remotes
2341 .into_iter()
2342 .map(|remotes| proto::get_remotes_response::Remote {
2343 name: remotes.name.to_string(),
2344 })
2345 .collect::<Vec<_>>(),
2346 })
2347 }
2348
2349 async fn handle_get_worktrees(
2350 this: Entity<Self>,
2351 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2352 mut cx: AsyncApp,
2353 ) -> Result<proto::GitWorktreesResponse> {
2354 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2355 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2356
2357 let worktrees = repository_handle
2358 .update(&mut cx, |repository_handle, _| {
2359 repository_handle.worktrees()
2360 })
2361 .await??;
2362
2363 Ok(proto::GitWorktreesResponse {
2364 worktrees: worktrees
2365 .into_iter()
2366 .map(|worktree| worktree_to_proto(&worktree))
2367 .collect::<Vec<_>>(),
2368 })
2369 }
2370
2371 async fn handle_create_worktree(
2372 this: Entity<Self>,
2373 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2374 mut cx: AsyncApp,
2375 ) -> Result<proto::Ack> {
2376 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2377 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2378 let directory = PathBuf::from(envelope.payload.directory);
2379 let name = envelope.payload.name;
2380 let commit = envelope.payload.commit;
2381
2382 repository_handle
2383 .update(&mut cx, |repository_handle, _| {
2384 repository_handle.create_worktree(name, directory, commit)
2385 })
2386 .await??;
2387
2388 Ok(proto::Ack {})
2389 }
2390
2391 async fn handle_remove_worktree(
2392 this: Entity<Self>,
2393 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2394 mut cx: AsyncApp,
2395 ) -> Result<proto::Ack> {
2396 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2397 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2398 let path = PathBuf::from(envelope.payload.path);
2399 let force = envelope.payload.force;
2400
2401 repository_handle
2402 .update(&mut cx, |repository_handle, _| {
2403 repository_handle.remove_worktree(path, force)
2404 })
2405 .await??;
2406
2407 Ok(proto::Ack {})
2408 }
2409
2410 async fn handle_rename_worktree(
2411 this: Entity<Self>,
2412 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2413 mut cx: AsyncApp,
2414 ) -> Result<proto::Ack> {
2415 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2416 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2417 let old_path = PathBuf::from(envelope.payload.old_path);
2418 let new_path = PathBuf::from(envelope.payload.new_path);
2419
2420 repository_handle
2421 .update(&mut cx, |repository_handle, _| {
2422 repository_handle.rename_worktree(old_path, new_path)
2423 })
2424 .await??;
2425
2426 Ok(proto::Ack {})
2427 }
2428
2429 async fn handle_get_branches(
2430 this: Entity<Self>,
2431 envelope: TypedEnvelope<proto::GitGetBranches>,
2432 mut cx: AsyncApp,
2433 ) -> Result<proto::GitBranchesResponse> {
2434 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2435 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2436
2437 let branches = repository_handle
2438 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2439 .await??;
2440
2441 Ok(proto::GitBranchesResponse {
2442 branches: branches
2443 .into_iter()
2444 .map(|branch| branch_to_proto(&branch))
2445 .collect::<Vec<_>>(),
2446 })
2447 }
2448 async fn handle_get_default_branch(
2449 this: Entity<Self>,
2450 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2451 mut cx: AsyncApp,
2452 ) -> Result<proto::GetDefaultBranchResponse> {
2453 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2454 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2455
2456 let branch = repository_handle
2457 .update(&mut cx, |repository_handle, _| {
2458 repository_handle.default_branch(false)
2459 })
2460 .await??
2461 .map(Into::into);
2462
2463 Ok(proto::GetDefaultBranchResponse { branch })
2464 }
2465 async fn handle_create_branch(
2466 this: Entity<Self>,
2467 envelope: TypedEnvelope<proto::GitCreateBranch>,
2468 mut cx: AsyncApp,
2469 ) -> Result<proto::Ack> {
2470 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2471 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2472 let branch_name = envelope.payload.branch_name;
2473
2474 repository_handle
2475 .update(&mut cx, |repository_handle, _| {
2476 repository_handle.create_branch(branch_name, None)
2477 })
2478 .await??;
2479
2480 Ok(proto::Ack {})
2481 }
2482
2483 async fn handle_change_branch(
2484 this: Entity<Self>,
2485 envelope: TypedEnvelope<proto::GitChangeBranch>,
2486 mut cx: AsyncApp,
2487 ) -> Result<proto::Ack> {
2488 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2489 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2490 let branch_name = envelope.payload.branch_name;
2491
2492 repository_handle
2493 .update(&mut cx, |repository_handle, _| {
2494 repository_handle.change_branch(branch_name)
2495 })
2496 .await??;
2497
2498 Ok(proto::Ack {})
2499 }
2500
2501 async fn handle_rename_branch(
2502 this: Entity<Self>,
2503 envelope: TypedEnvelope<proto::GitRenameBranch>,
2504 mut cx: AsyncApp,
2505 ) -> Result<proto::Ack> {
2506 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2507 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2508 let branch = envelope.payload.branch;
2509 let new_name = envelope.payload.new_name;
2510
2511 repository_handle
2512 .update(&mut cx, |repository_handle, _| {
2513 repository_handle.rename_branch(branch, new_name)
2514 })
2515 .await??;
2516
2517 Ok(proto::Ack {})
2518 }
2519
2520 async fn handle_create_remote(
2521 this: Entity<Self>,
2522 envelope: TypedEnvelope<proto::GitCreateRemote>,
2523 mut cx: AsyncApp,
2524 ) -> Result<proto::Ack> {
2525 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2526 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2527 let remote_name = envelope.payload.remote_name;
2528 let remote_url = envelope.payload.remote_url;
2529
2530 repository_handle
2531 .update(&mut cx, |repository_handle, _| {
2532 repository_handle.create_remote(remote_name, remote_url)
2533 })
2534 .await??;
2535
2536 Ok(proto::Ack {})
2537 }
2538
2539 async fn handle_delete_branch(
2540 this: Entity<Self>,
2541 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2542 mut cx: AsyncApp,
2543 ) -> Result<proto::Ack> {
2544 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2545 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2546 let is_remote = envelope.payload.is_remote;
2547 let branch_name = envelope.payload.branch_name;
2548
2549 repository_handle
2550 .update(&mut cx, |repository_handle, _| {
2551 repository_handle.delete_branch(is_remote, branch_name)
2552 })
2553 .await??;
2554
2555 Ok(proto::Ack {})
2556 }
2557
2558 async fn handle_remove_remote(
2559 this: Entity<Self>,
2560 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2561 mut cx: AsyncApp,
2562 ) -> Result<proto::Ack> {
2563 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2564 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2565 let remote_name = envelope.payload.remote_name;
2566
2567 repository_handle
2568 .update(&mut cx, |repository_handle, _| {
2569 repository_handle.remove_remote(remote_name)
2570 })
2571 .await??;
2572
2573 Ok(proto::Ack {})
2574 }
2575
2576 async fn handle_show(
2577 this: Entity<Self>,
2578 envelope: TypedEnvelope<proto::GitShow>,
2579 mut cx: AsyncApp,
2580 ) -> Result<proto::GitCommitDetails> {
2581 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2582 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2583
2584 let commit = repository_handle
2585 .update(&mut cx, |repository_handle, _| {
2586 repository_handle.show(envelope.payload.commit)
2587 })
2588 .await??;
2589 Ok(proto::GitCommitDetails {
2590 sha: commit.sha.into(),
2591 message: commit.message.into(),
2592 commit_timestamp: commit.commit_timestamp,
2593 author_email: commit.author_email.into(),
2594 author_name: commit.author_name.into(),
2595 })
2596 }
2597
2598 async fn handle_load_commit_diff(
2599 this: Entity<Self>,
2600 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2601 mut cx: AsyncApp,
2602 ) -> Result<proto::LoadCommitDiffResponse> {
2603 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2604 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2605
2606 let commit_diff = repository_handle
2607 .update(&mut cx, |repository_handle, _| {
2608 repository_handle.load_commit_diff(envelope.payload.commit)
2609 })
2610 .await??;
2611 Ok(proto::LoadCommitDiffResponse {
2612 files: commit_diff
2613 .files
2614 .into_iter()
2615 .map(|file| proto::CommitFile {
2616 path: file.path.to_proto(),
2617 old_text: file.old_text,
2618 new_text: file.new_text,
2619 is_binary: file.is_binary,
2620 })
2621 .collect(),
2622 })
2623 }
2624
2625 async fn handle_file_history(
2626 this: Entity<Self>,
2627 envelope: TypedEnvelope<proto::GitFileHistory>,
2628 mut cx: AsyncApp,
2629 ) -> Result<proto::GitFileHistoryResponse> {
2630 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2631 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2632 let path = RepoPath::from_proto(&envelope.payload.path)?;
2633 let skip = envelope.payload.skip as usize;
2634 let limit = envelope.payload.limit.map(|l| l as usize);
2635
2636 let file_history = repository_handle
2637 .update(&mut cx, |repository_handle, _| {
2638 repository_handle.file_history_paginated(path, skip, limit)
2639 })
2640 .await??;
2641
2642 Ok(proto::GitFileHistoryResponse {
2643 entries: file_history
2644 .entries
2645 .into_iter()
2646 .map(|entry| proto::FileHistoryEntry {
2647 sha: entry.sha.to_string(),
2648 subject: entry.subject.to_string(),
2649 message: entry.message.to_string(),
2650 commit_timestamp: entry.commit_timestamp,
2651 author_name: entry.author_name.to_string(),
2652 author_email: entry.author_email.to_string(),
2653 })
2654 .collect(),
2655 path: file_history.path.to_proto(),
2656 })
2657 }
2658
2659 async fn handle_reset(
2660 this: Entity<Self>,
2661 envelope: TypedEnvelope<proto::GitReset>,
2662 mut cx: AsyncApp,
2663 ) -> Result<proto::Ack> {
2664 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2665 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2666
2667 let mode = match envelope.payload.mode() {
2668 git_reset::ResetMode::Soft => ResetMode::Soft,
2669 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2670 };
2671
2672 repository_handle
2673 .update(&mut cx, |repository_handle, cx| {
2674 repository_handle.reset(envelope.payload.commit, mode, cx)
2675 })
2676 .await??;
2677 Ok(proto::Ack {})
2678 }
2679
2680 async fn handle_checkout_files(
2681 this: Entity<Self>,
2682 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2683 mut cx: AsyncApp,
2684 ) -> Result<proto::Ack> {
2685 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2686 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2687 let paths = envelope
2688 .payload
2689 .paths
2690 .iter()
2691 .map(|s| RepoPath::from_proto(s))
2692 .collect::<Result<Vec<_>>>()?;
2693
2694 repository_handle
2695 .update(&mut cx, |repository_handle, cx| {
2696 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2697 })
2698 .await?;
2699 Ok(proto::Ack {})
2700 }
2701
2702 async fn handle_open_commit_message_buffer(
2703 this: Entity<Self>,
2704 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2705 mut cx: AsyncApp,
2706 ) -> Result<proto::OpenBufferResponse> {
2707 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2708 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2709 let buffer = repository
2710 .update(&mut cx, |repository, cx| {
2711 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2712 })
2713 .await?;
2714
2715 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
2716 this.update(&mut cx, |this, cx| {
2717 this.buffer_store.update(cx, |buffer_store, cx| {
2718 buffer_store
2719 .create_buffer_for_peer(
2720 &buffer,
2721 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2722 cx,
2723 )
2724 .detach_and_log_err(cx);
2725 })
2726 });
2727
2728 Ok(proto::OpenBufferResponse {
2729 buffer_id: buffer_id.to_proto(),
2730 })
2731 }
2732
2733 async fn handle_askpass(
2734 this: Entity<Self>,
2735 envelope: TypedEnvelope<proto::AskPassRequest>,
2736 mut cx: AsyncApp,
2737 ) -> Result<proto::AskPassResponse> {
2738 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2739 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2740
2741 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
2742 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2743 debug_panic!("no askpass found");
2744 anyhow::bail!("no askpass found");
2745 };
2746
2747 let response = askpass
2748 .ask_password(envelope.payload.prompt)
2749 .await
2750 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2751
2752 delegates
2753 .lock()
2754 .insert(envelope.payload.askpass_id, askpass);
2755
2756 // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
2757 Ok(proto::AskPassResponse {
2758 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2759 })
2760 }
2761
2762 async fn handle_check_for_pushed_commits(
2763 this: Entity<Self>,
2764 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2765 mut cx: AsyncApp,
2766 ) -> Result<proto::CheckForPushedCommitsResponse> {
2767 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2768 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2769
2770 let branches = repository_handle
2771 .update(&mut cx, |repository_handle, _| {
2772 repository_handle.check_for_pushed_commits()
2773 })
2774 .await??;
2775 Ok(proto::CheckForPushedCommitsResponse {
2776 pushed_to: branches
2777 .into_iter()
2778 .map(|commit| commit.to_string())
2779 .collect(),
2780 })
2781 }
2782
2783 async fn handle_git_diff(
2784 this: Entity<Self>,
2785 envelope: TypedEnvelope<proto::GitDiff>,
2786 mut cx: AsyncApp,
2787 ) -> Result<proto::GitDiffResponse> {
2788 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2789 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2790 let diff_type = match envelope.payload.diff_type() {
2791 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2792 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2793 proto::git_diff::DiffType::MergeBase => {
2794 let base_ref = envelope
2795 .payload
2796 .merge_base_ref
2797 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2798 DiffType::MergeBase {
2799 base_ref: base_ref.into(),
2800 }
2801 }
2802 };
2803
2804 let mut diff = repository_handle
2805 .update(&mut cx, |repository_handle, cx| {
2806 repository_handle.diff(diff_type, cx)
2807 })
2808 .await??;
2809 const ONE_MB: usize = 1_000_000;
2810 if diff.len() > ONE_MB {
2811 diff = diff.chars().take(ONE_MB).collect()
2812 }
2813
2814 Ok(proto::GitDiffResponse { diff })
2815 }
2816
2817 async fn handle_tree_diff(
2818 this: Entity<Self>,
2819 request: TypedEnvelope<proto::GetTreeDiff>,
2820 mut cx: AsyncApp,
2821 ) -> Result<proto::GetTreeDiffResponse> {
2822 let repository_id = RepositoryId(request.payload.repository_id);
2823 let diff_type = if request.payload.is_merge {
2824 DiffTreeType::MergeBase {
2825 base: request.payload.base.into(),
2826 head: request.payload.head.into(),
2827 }
2828 } else {
2829 DiffTreeType::Since {
2830 base: request.payload.base.into(),
2831 head: request.payload.head.into(),
2832 }
2833 };
2834
2835 let diff = this
2836 .update(&mut cx, |this, cx| {
2837 let repository = this.repositories().get(&repository_id)?;
2838 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2839 })
2840 .context("missing repository")?
2841 .await??;
2842
2843 Ok(proto::GetTreeDiffResponse {
2844 entries: diff
2845 .entries
2846 .into_iter()
2847 .map(|(path, status)| proto::TreeDiffStatus {
2848 path: path.as_ref().to_proto(),
2849 status: match status {
2850 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2851 TreeDiffStatus::Modified { .. } => {
2852 proto::tree_diff_status::Status::Modified.into()
2853 }
2854 TreeDiffStatus::Deleted { .. } => {
2855 proto::tree_diff_status::Status::Deleted.into()
2856 }
2857 },
2858 oid: match status {
2859 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2860 Some(old.to_string())
2861 }
2862 TreeDiffStatus::Added => None,
2863 },
2864 })
2865 .collect(),
2866 })
2867 }
2868
2869 async fn handle_get_blob_content(
2870 this: Entity<Self>,
2871 request: TypedEnvelope<proto::GetBlobContent>,
2872 mut cx: AsyncApp,
2873 ) -> Result<proto::GetBlobContentResponse> {
2874 let oid = git::Oid::from_str(&request.payload.oid)?;
2875 let repository_id = RepositoryId(request.payload.repository_id);
2876 let content = this
2877 .update(&mut cx, |this, cx| {
2878 let repository = this.repositories().get(&repository_id)?;
2879 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2880 })
2881 .context("missing repository")?
2882 .await?;
2883 Ok(proto::GetBlobContentResponse { content })
2884 }
2885
2886 async fn handle_open_unstaged_diff(
2887 this: Entity<Self>,
2888 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2889 mut cx: AsyncApp,
2890 ) -> Result<proto::OpenUnstagedDiffResponse> {
2891 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2892 let diff = this
2893 .update(&mut cx, |this, cx| {
2894 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2895 Some(this.open_unstaged_diff(buffer, cx))
2896 })
2897 .context("missing buffer")?
2898 .await?;
2899 this.update(&mut cx, |this, _| {
2900 let shared_diffs = this
2901 .shared_diffs
2902 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2903 .or_default();
2904 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2905 });
2906 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
2907 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2908 }
2909
2910 async fn handle_open_uncommitted_diff(
2911 this: Entity<Self>,
2912 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2913 mut cx: AsyncApp,
2914 ) -> Result<proto::OpenUncommittedDiffResponse> {
2915 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2916 let diff = this
2917 .update(&mut cx, |this, cx| {
2918 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2919 Some(this.open_uncommitted_diff(buffer, cx))
2920 })
2921 .context("missing buffer")?
2922 .await?;
2923 this.update(&mut cx, |this, _| {
2924 let shared_diffs = this
2925 .shared_diffs
2926 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2927 .or_default();
2928 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2929 });
2930 Ok(diff.read_with(&cx, |diff, cx| {
2931 use proto::open_uncommitted_diff_response::Mode;
2932
2933 let unstaged_diff = diff.secondary_diff();
2934 let index_snapshot = unstaged_diff.and_then(|diff| {
2935 let diff = diff.read(cx);
2936 diff.base_text_exists().then(|| diff.base_text(cx))
2937 });
2938
2939 let mode;
2940 let staged_text;
2941 let committed_text;
2942 if diff.base_text_exists() {
2943 let committed_snapshot = diff.base_text(cx);
2944 committed_text = Some(committed_snapshot.text());
2945 if let Some(index_text) = index_snapshot {
2946 if index_text.remote_id() == committed_snapshot.remote_id() {
2947 mode = Mode::IndexMatchesHead;
2948 staged_text = None;
2949 } else {
2950 mode = Mode::IndexAndHead;
2951 staged_text = Some(index_text.text());
2952 }
2953 } else {
2954 mode = Mode::IndexAndHead;
2955 staged_text = None;
2956 }
2957 } else {
2958 mode = Mode::IndexAndHead;
2959 committed_text = None;
2960 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2961 }
2962
2963 proto::OpenUncommittedDiffResponse {
2964 committed_text,
2965 staged_text,
2966 mode: mode.into(),
2967 }
2968 }))
2969 }
2970
2971 async fn handle_update_diff_bases(
2972 this: Entity<Self>,
2973 request: TypedEnvelope<proto::UpdateDiffBases>,
2974 mut cx: AsyncApp,
2975 ) -> Result<()> {
2976 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2977 this.update(&mut cx, |this, cx| {
2978 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2979 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2980 {
2981 let buffer = buffer.read(cx).text_snapshot();
2982 diff_state.update(cx, |diff_state, cx| {
2983 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2984 })
2985 }
2986 });
2987 Ok(())
2988 }
2989
2990 async fn handle_blame_buffer(
2991 this: Entity<Self>,
2992 envelope: TypedEnvelope<proto::BlameBuffer>,
2993 mut cx: AsyncApp,
2994 ) -> Result<proto::BlameBufferResponse> {
2995 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2996 let version = deserialize_version(&envelope.payload.version);
2997 let buffer = this.read_with(&cx, |this, cx| {
2998 this.buffer_store.read(cx).get_existing(buffer_id)
2999 })?;
3000 buffer
3001 .update(&mut cx, |buffer, _| {
3002 buffer.wait_for_version(version.clone())
3003 })
3004 .await?;
3005 let blame = this
3006 .update(&mut cx, |this, cx| {
3007 this.blame_buffer(&buffer, Some(version), cx)
3008 })
3009 .await?;
3010 Ok(serialize_blame_buffer_response(blame))
3011 }
3012
3013 async fn handle_get_permalink_to_line(
3014 this: Entity<Self>,
3015 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3016 mut cx: AsyncApp,
3017 ) -> Result<proto::GetPermalinkToLineResponse> {
3018 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3019 // let version = deserialize_version(&envelope.payload.version);
3020 let selection = {
3021 let proto_selection = envelope
3022 .payload
3023 .selection
3024 .context("no selection to get permalink for defined")?;
3025 proto_selection.start as u32..proto_selection.end as u32
3026 };
3027 let buffer = this.read_with(&cx, |this, cx| {
3028 this.buffer_store.read(cx).get_existing(buffer_id)
3029 })?;
3030 let permalink = this
3031 .update(&mut cx, |this, cx| {
3032 this.get_permalink_to_line(&buffer, selection, cx)
3033 })
3034 .await?;
3035 Ok(proto::GetPermalinkToLineResponse {
3036 permalink: permalink.to_string(),
3037 })
3038 }
3039
3040 fn repository_for_request(
3041 this: &Entity<Self>,
3042 id: RepositoryId,
3043 cx: &mut AsyncApp,
3044 ) -> Result<Entity<Repository>> {
3045 this.read_with(cx, |this, _| {
3046 this.repositories
3047 .get(&id)
3048 .context("missing repository handle")
3049 .cloned()
3050 })
3051 }
3052
3053 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3054 self.repositories
3055 .iter()
3056 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3057 .collect()
3058 }
3059
    /// Groups a worktree's updated entries by the repository that contains
    /// them, converting each to a repo-relative path.
    ///
    /// Runs on the background executor. A path that falls under multiple
    /// nested repositories is attributed only to the innermost one.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorting lets each repository claim its entries as one contiguous range.
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // Iterate repositories deepest-work-directory-first (reverse sorted
            // order) so nested repositories claim their paths before ancestors.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3139}
3140
3141impl BufferGitState {
3142 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3143 Self {
3144 unstaged_diff: Default::default(),
3145 uncommitted_diff: Default::default(),
3146 oid_diffs: Default::default(),
3147 recalculate_diff_task: Default::default(),
3148 language: Default::default(),
3149 language_registry: Default::default(),
3150 recalculating_tx: postage::watch::channel_with(false).0,
3151 hunk_staging_operation_count: 0,
3152 hunk_staging_operation_count_as_of_write: 0,
3153 head_text: Default::default(),
3154 index_text: Default::default(),
3155 oid_texts: Default::default(),
3156 head_changed: Default::default(),
3157 index_changed: Default::default(),
3158 language_changed: Default::default(),
3159 conflict_updated_futures: Default::default(),
3160 conflict_set: Default::default(),
3161 reparse_conflict_markers_task: Default::default(),
3162 }
3163 }
3164
3165 #[ztracing::instrument(skip_all)]
3166 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3167 self.language = buffer.read(cx).language().cloned();
3168 self.language_changed = true;
3169 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3170 }
3171
    /// Re-scans `buffer` for git conflict markers on a background task and
    /// swaps the result into the associated `ConflictSet`.
    ///
    /// The returned receiver resolves once the conflict set has been updated.
    /// When there is no live conflict set, or no conflicts were previously
    /// present, the sender is dropped and the receiver resolves as canceled.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when conflicts were present before; otherwise there is
        // no previous snapshot to diff the new parse against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff on the background executor; only the final
                // snapshot swap runs on the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake every caller waiting on a reparse, not just ours.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3223
3224 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3225 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3226 }
3227
3228 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3229 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3230 }
3231
3232 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3233 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3234 }
3235
3236 fn handle_base_texts_updated(
3237 &mut self,
3238 buffer: text::BufferSnapshot,
3239 message: proto::UpdateDiffBases,
3240 cx: &mut Context<Self>,
3241 ) {
3242 use proto::update_diff_bases::Mode;
3243
3244 let Some(mode) = Mode::from_i32(message.mode) else {
3245 return;
3246 };
3247
3248 let diff_bases_change = match mode {
3249 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3250 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3251 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3252 Mode::IndexAndHead => DiffBasesChange::SetEach {
3253 index: message.staged_text,
3254 head: message.committed_text,
3255 },
3256 };
3257
3258 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3259 }
3260
3261 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3262 if *self.recalculating_tx.borrow() {
3263 let mut rx = self.recalculating_tx.subscribe();
3264 Some(async move {
3265 loop {
3266 let is_recalculating = rx.recv().await;
3267 if is_recalculating != Some(true) {
3268 break;
3269 }
3270 }
3271 })
3272 } else {
3273 None
3274 }
3275 }
3276
3277 fn diff_bases_changed(
3278 &mut self,
3279 buffer: text::BufferSnapshot,
3280 diff_bases_change: Option<DiffBasesChange>,
3281 cx: &mut Context<Self>,
3282 ) {
3283 match diff_bases_change {
3284 Some(DiffBasesChange::SetIndex(index)) => {
3285 self.index_text = index.map(|mut index| {
3286 text::LineEnding::normalize(&mut index);
3287 Arc::from(index.as_str())
3288 });
3289 self.index_changed = true;
3290 }
3291 Some(DiffBasesChange::SetHead(head)) => {
3292 self.head_text = head.map(|mut head| {
3293 text::LineEnding::normalize(&mut head);
3294 Arc::from(head.as_str())
3295 });
3296 self.head_changed = true;
3297 }
3298 Some(DiffBasesChange::SetBoth(text)) => {
3299 let text = text.map(|mut text| {
3300 text::LineEnding::normalize(&mut text);
3301 Arc::from(text.as_str())
3302 });
3303 self.head_text = text.clone();
3304 self.index_text = text;
3305 self.head_changed = true;
3306 self.index_changed = true;
3307 }
3308 Some(DiffBasesChange::SetEach { index, head }) => {
3309 self.index_text = index.map(|mut index| {
3310 text::LineEnding::normalize(&mut index);
3311 Arc::from(index.as_str())
3312 });
3313 self.index_changed = true;
3314 self.head_text = head.map(|mut head| {
3315 text::LineEnding::normalize(&mut head);
3316 Arc::from(head.as_str())
3317 });
3318 self.head_changed = true;
3319 }
3320 None => {}
3321 }
3322
3323 self.recalculate_diffs(buffer, cx)
3324 }
3325
3326 #[ztracing::instrument(skip_all)]
3327 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3328 *self.recalculating_tx.borrow_mut() = true;
3329
3330 let language = self.language.clone();
3331 let language_registry = self.language_registry.clone();
3332 let unstaged_diff = self.unstaged_diff();
3333 let uncommitted_diff = self.uncommitted_diff();
3334 let head = self.head_text.clone();
3335 let index = self.index_text.clone();
3336 let index_changed = self.index_changed;
3337 let head_changed = self.head_changed;
3338 let language_changed = self.language_changed;
3339 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3340 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3341 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3342 (None, None) => true,
3343 _ => false,
3344 };
3345
3346 let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
3347 .oid_diffs
3348 .iter()
3349 .filter_map(|(oid, weak)| {
3350 let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
3351 weak.upgrade().map(|diff| (*oid, diff, base_text))
3352 })
3353 .collect();
3354
3355 self.oid_diffs.retain(|oid, weak| {
3356 let alive = weak.upgrade().is_some();
3357 if !alive {
3358 if let Some(oid) = oid {
3359 self.oid_texts.remove(oid);
3360 }
3361 }
3362 alive
3363 });
3364 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3365 log::debug!(
3366 "start recalculating diffs for buffer {}",
3367 buffer.remote_id()
3368 );
3369
3370 let mut new_unstaged_diff = None;
3371 if let Some(unstaged_diff) = &unstaged_diff {
3372 new_unstaged_diff = Some(
3373 cx.update(|cx| {
3374 unstaged_diff.read(cx).update_diff(
3375 buffer.clone(),
3376 index,
3377 index_changed.then_some(false),
3378 language.clone(),
3379 cx,
3380 )
3381 })
3382 .await,
3383 );
3384 }
3385
3386 // Dropping BufferDiff can be expensive, so yield back to the event loop
3387 // for a bit
3388 yield_now().await;
3389
3390 let mut new_uncommitted_diff = None;
3391 if let Some(uncommitted_diff) = &uncommitted_diff {
3392 new_uncommitted_diff = if index_matches_head {
3393 new_unstaged_diff.clone()
3394 } else {
3395 Some(
3396 cx.update(|cx| {
3397 uncommitted_diff.read(cx).update_diff(
3398 buffer.clone(),
3399 head,
3400 head_changed.then_some(true),
3401 language.clone(),
3402 cx,
3403 )
3404 })
3405 .await,
3406 )
3407 }
3408 }
3409
3410 // Dropping BufferDiff can be expensive, so yield back to the event loop
3411 // for a bit
3412 yield_now().await;
3413
3414 let cancel = this.update(cx, |this, _| {
3415 // This checks whether all pending stage/unstage operations
3416 // have quiesced (i.e. both the corresponding write and the
3417 // read of that write have completed). If not, then we cancel
3418 // this recalculation attempt to avoid invalidating pending
3419 // state too quickly; another recalculation will come along
3420 // later and clear the pending state once the state of the index has settled.
3421 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3422 *this.recalculating_tx.borrow_mut() = false;
3423 true
3424 } else {
3425 false
3426 }
3427 })?;
3428 if cancel {
3429 log::debug!(
3430 concat!(
3431 "aborting recalculating diffs for buffer {}",
3432 "due to subsequent hunk operations",
3433 ),
3434 buffer.remote_id()
3435 );
3436 return Ok(());
3437 }
3438
3439 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3440 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3441 {
3442 let task = unstaged_diff.update(cx, |diff, cx| {
3443 // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
3444 // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
3445 // view multibuffers.
3446 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3447 });
3448 Some(task.await)
3449 } else {
3450 None
3451 };
3452
3453 yield_now().await;
3454
3455 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3456 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3457 {
3458 uncommitted_diff
3459 .update(cx, |diff, cx| {
3460 if language_changed {
3461 diff.language_changed(language.clone(), language_registry, cx);
3462 }
3463 diff.set_snapshot_with_secondary(
3464 new_uncommitted_diff,
3465 &buffer,
3466 unstaged_changed_range.flatten(),
3467 true,
3468 cx,
3469 )
3470 })
3471 .await;
3472 }
3473
3474 yield_now().await;
3475
3476 for (oid, oid_diff, base_text) in oid_diffs {
3477 let new_oid_diff = cx
3478 .update(|cx| {
3479 oid_diff.read(cx).update_diff(
3480 buffer.clone(),
3481 base_text,
3482 None,
3483 language.clone(),
3484 cx,
3485 )
3486 })
3487 .await;
3488
3489 oid_diff
3490 .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
3491 .await;
3492
3493 log::debug!(
3494 "finished recalculating oid diff for buffer {} oid {:?}",
3495 buffer.remote_id(),
3496 oid
3497 );
3498
3499 yield_now().await;
3500 }
3501
3502 log::debug!(
3503 "finished recalculating diffs for buffer {}",
3504 buffer.remote_id()
3505 );
3506
3507 if let Some(this) = this.upgrade() {
3508 this.update(cx, |this, _| {
3509 this.index_changed = false;
3510 this.head_changed = false;
3511 this.language_changed = false;
3512 *this.recalculating_tx.borrow_mut() = false;
3513 });
3514 }
3515
3516 Ok(())
3517 }));
3518 }
3519}
3520
/// Builds an `AskPassDelegate` that forwards credential prompts from a local
/// git operation to the downstream client over RPC, returning the (encrypted)
/// answer through `tx`.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // No downstream client means nobody to ask; dropping `tx`
            // cancels the prompt.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext secret from memory once encrypted.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3550
3551impl RepositoryId {
3552 pub fn to_proto(self) -> u64 {
3553 self.0
3554 }
3555
3556 pub fn from_proto(id: u64) -> Self {
3557 RepositoryId(id)
3558 }
3559}
3560
3561impl RepositorySnapshot {
3562 fn empty(
3563 id: RepositoryId,
3564 work_directory_abs_path: Arc<Path>,
3565 original_repo_abs_path: Option<Arc<Path>>,
3566 path_style: PathStyle,
3567 ) -> Self {
3568 Self {
3569 id,
3570 statuses_by_path: Default::default(),
3571 original_repo_abs_path: original_repo_abs_path
3572 .unwrap_or_else(|| work_directory_abs_path.clone()),
3573 work_directory_abs_path,
3574 branch: None,
3575 head_commit: None,
3576 scan_id: 0,
3577 merge: Default::default(),
3578 remote_origin_url: None,
3579 remote_upstream_url: None,
3580 stash_entries: Default::default(),
3581 linked_worktrees: Arc::from([]),
3582 path_style,
3583 }
3584 }
3585
3586 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3587 proto::UpdateRepository {
3588 branch_summary: self.branch.as_ref().map(branch_to_proto),
3589 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3590 updated_statuses: self
3591 .statuses_by_path
3592 .iter()
3593 .map(|entry| entry.to_proto())
3594 .collect(),
3595 removed_statuses: Default::default(),
3596 current_merge_conflicts: self
3597 .merge
3598 .merge_heads_by_conflicted_path
3599 .iter()
3600 .map(|(repo_path, _)| repo_path.to_proto())
3601 .collect(),
3602 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3603 project_id,
3604 id: self.id.to_proto(),
3605 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3606 entry_ids: vec![self.id.to_proto()],
3607 scan_id: self.scan_id,
3608 is_last_update: true,
3609 stash_entries: self
3610 .stash_entries
3611 .entries
3612 .iter()
3613 .map(stash_to_proto)
3614 .collect(),
3615 remote_upstream_url: self.remote_upstream_url.clone(),
3616 remote_origin_url: self.remote_origin_url.clone(),
3617 original_repo_abs_path: Some(
3618 self.original_repo_abs_path.to_string_lossy().into_owned(),
3619 ),
3620 linked_worktrees: self
3621 .linked_worktrees
3622 .iter()
3623 .map(worktree_to_proto)
3624 .collect(),
3625 }
3626 }
3627
    /// Serializes the difference between `old` and `self` for a peer that
    /// already holds `old`: only changed statuses are sent, plus the paths
    /// whose statuses disappeared.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        // Merge-join the two status sequences, which both iterate in path order.
        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Path only in the new snapshot: newly statused.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Present in both: send only if something changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Path only in the old snapshot: status removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3708
3709 /// The main worktree is the original checkout that other worktrees were
3710 /// created from.
3711 ///
3712 /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
3713 /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
3714 pub fn is_main_worktree(&self) -> bool {
3715 self.work_directory_abs_path == self.original_repo_abs_path
3716 }
3717
3718 /// Returns true if this repository is a linked worktree, that is, one that
3719 /// was created from another worktree.
3720 ///
3721 /// This is by definition the opposite of [`Self::is_main_worktree`].
3722 pub fn is_linked_worktree(&self) -> bool {
3723 !self.is_main_worktree()
3724 }
3725
3726 pub fn linked_worktrees(&self) -> &[GitWorktree] {
3727 &self.linked_worktrees
3728 }
3729
    /// Iterates over all recorded per-path git status entries, in path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }
3733
    /// Aggregated git status summary across all tracked paths.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }
3737
3738 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3739 self.statuses_by_path
3740 .get(&PathKey(path.as_ref().clone()), ())
3741 .cloned()
3742 }
3743
3744 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
3745 self.statuses_by_path
3746 .get(&PathKey(path.as_ref().clone()), ())
3747 .and_then(|entry| entry.diff_stat)
3748 }
3749
    /// Converts an absolute filesystem path to a repository-relative path,
    /// returning `None` when it lies outside the working directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }
3753
3754 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3755 self.path_style
3756 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3757 .unwrap()
3758 .into()
3759 }
3760
3761 #[inline]
3762 fn abs_path_to_repo_path_inner(
3763 work_directory_abs_path: &Path,
3764 abs_path: &Path,
3765 path_style: PathStyle,
3766 ) -> Option<RepoPath> {
3767 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3768 Some(RepoPath::from_rel_path(&rel_path))
3769 }
3770
    /// Whether `repo_path` was recorded as conflicted at the time the merge
    /// heads last changed, regardless of its current status.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }
3776
3777 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3778 let had_conflict_on_last_merge_head_change = self
3779 .merge
3780 .merge_heads_by_conflicted_path
3781 .contains_key(repo_path);
3782 let has_conflict_currently = self
3783 .status_for_path(repo_path)
3784 .is_some_and(|entry| entry.status.is_conflicted());
3785 had_conflict_on_last_merge_head_change || has_conflict_currently
3786 }
3787
3788 /// This is the name that will be displayed in the repository selector for this repository.
3789 pub fn display_name(&self) -> SharedString {
3790 self.work_directory_abs_path
3791 .file_name()
3792 .unwrap_or_default()
3793 .to_string_lossy()
3794 .to_string()
3795 .into()
3796 }
3797}
3798
3799pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3800 proto::StashEntry {
3801 oid: entry.oid.as_bytes().to_vec(),
3802 message: entry.message.clone(),
3803 branch: entry.branch.clone(),
3804 index: entry.index as u64,
3805 timestamp: entry.timestamp,
3806 }
3807}
3808
3809pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3810 Ok(StashEntry {
3811 oid: Oid::from_bytes(&entry.oid)?,
3812 message: entry.message.clone(),
3813 index: entry.index as usize,
3814 branch: entry.branch.clone(),
3815 timestamp: entry.timestamp,
3816 })
3817}
3818
3819impl MergeDetails {
3820 async fn update(
3821 &mut self,
3822 backend: &Arc<dyn GitRepository>,
3823 current_conflicted_paths: Vec<RepoPath>,
3824 ) -> Result<bool> {
3825 log::debug!("load merge details");
3826 self.message = backend.merge_message().await.map(SharedString::from);
3827 let heads = backend
3828 .revparse_batch(vec![
3829 "MERGE_HEAD".into(),
3830 "CHERRY_PICK_HEAD".into(),
3831 "REBASE_HEAD".into(),
3832 "REVERT_HEAD".into(),
3833 "APPLY_HEAD".into(),
3834 ])
3835 .await
3836 .log_err()
3837 .unwrap_or_default()
3838 .into_iter()
3839 .map(|opt| opt.map(SharedString::from))
3840 .collect::<Vec<_>>();
3841
3842 let mut conflicts_changed = false;
3843
3844 // Record the merge state for newly conflicted paths
3845 for path in ¤t_conflicted_paths {
3846 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3847 conflicts_changed = true;
3848 self.merge_heads_by_conflicted_path
3849 .insert(path.clone(), heads.clone());
3850 }
3851 }
3852
3853 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3854 self.merge_heads_by_conflicted_path
3855 .retain(|path, old_merge_heads| {
3856 let keep = current_conflicted_paths.contains(path)
3857 || (old_merge_heads == &heads
3858 && old_merge_heads.iter().any(|head| head.is_some()));
3859 if !keep {
3860 conflicts_changed = true;
3861 }
3862 keep
3863 });
3864
3865 Ok(conflicts_changed)
3866 }
3867}
3868
3869impl Repository {
3870 pub fn is_trusted(&self) -> bool {
3871 match self.repository_state.peek() {
3872 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3873 _ => false,
3874 }
3875 }
3876
    /// Returns a clone of the current point-in-time repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
3880
    /// Iterates over all recorded pending git operations.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
3884
    /// Aggregated summary over all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
3888
3889 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3890 self.pending_ops
3891 .get(&PathKey(path.as_ref().clone()), ())
3892 .cloned()
3893 }
3894
    /// Constructs a repository backed by a local git checkout.
    ///
    /// The backend is opened asynchronously: `repository_state` is a shared
    /// future that resolves once `LocalRepositoryState` finishes initializing,
    /// and the git worker is spawned immediately against that future.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Stringify the error so the future's output can be `Shared`.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Cached commit-graph data is branch-specific: invalidate it whenever
        // the branch changes after the initial scan.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3962
    /// Constructs a repository that proxies all operations to a remote host
    /// over `client`; its state is available immediately.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike the local case, remote state needs no async initialization.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4000
    /// Upgrades the weak back-reference to the owning `GitStore`, if alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4004
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, applies any changes to the per-buffer diff state,
    /// and mirrors those changes to the downstream client, if connected.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        // Keyed so that rapid successive reload requests collapse into one job.
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Gather, per open buffer in this repo, which base texts
                // (index and/or head) are currently tracked, along with the
                // currently-held text so changes can be detected.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Load fresh base texts off the main thread and compute the
                // minimal change (index, head, both, or each) per buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Apply each change locally and forward it downstream.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4164
4165 pub fn send_job<F, Fut, R>(
4166 &mut self,
4167 status: Option<SharedString>,
4168 job: F,
4169 ) -> oneshot::Receiver<R>
4170 where
4171 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
4172 Fut: Future<Output = R> + 'static,
4173 R: Send + 'static,
4174 {
4175 self.send_keyed_job(None, status, job)
4176 }
4177
    /// Enqueues `job` on the repository's serialized job queue.
    ///
    /// When `key` is `Some`, the queue may use it to coalesce/deduplicate
    /// with other jobs bearing the same `GitJobKey`. When `status` is
    /// provided, the job is registered in `active_jobs` for the duration of
    /// its execution so the UI can display a progress message.
    ///
    /// The returned receiver resolves with the job's result once it has run;
    /// it is canceled if the queue is dropped first.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job so its status message and elapsed
                        // time can be shown while it runs.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Always clear the bookkeeping entry, even if the
                        // repository entity is being torn down (`.ok()`).
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The caller may have dropped the receiver; ignore
                        // the send error in that case.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4227
4228 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4229 let Some(git_store) = self.git_store.upgrade() else {
4230 return;
4231 };
4232 let entity = cx.entity();
4233 git_store.update(cx, |git_store, cx| {
4234 let Some((&id, _)) = git_store
4235 .repositories
4236 .iter()
4237 .find(|(_, handle)| *handle == &entity)
4238 else {
4239 return;
4240 };
4241 git_store.active_repo_id = Some(id);
4242 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4243 });
4244 }
4245
    /// Returns the file status entries recorded in the current repository
    /// snapshot (no fresh `git status` is run).
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4249
    /// Returns the cached diff statistics for `path`, if the snapshot has
    /// any recorded for it.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4253
    /// Returns a clone of the stash entries recorded in the current snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4257
4258 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4259 let git_store = self.git_store.upgrade()?;
4260 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4261 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4262 let abs_path = SanitizedPath::new(&abs_path);
4263 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4264 Some(ProjectPath {
4265 worktree_id: worktree.read(cx).id(),
4266 path: relative_path,
4267 })
4268 }
4269
4270 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4271 let git_store = self.git_store.upgrade()?;
4272 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4273 let abs_path = worktree_store.absolutize(path, cx)?;
4274 self.snapshot.abs_path_to_repo_path(&abs_path)
4275 }
4276
4277 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4278 other
4279 .read(cx)
4280 .snapshot
4281 .work_directory_abs_path
4282 .starts_with(&self.snapshot.work_directory_abs_path)
4283 }
4284
    /// Returns a buffer for composing a commit message, creating it lazily.
    ///
    /// The buffer is cached on the repository, so repeated calls return the
    /// same entity. For local repositories a fresh in-memory buffer is
    /// created; for remote (collab) repositories the host is asked to open
    /// one and it is mirrored locally, tagged with the "Git Commit" language
    /// when a registry is available.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the previously created buffer.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait for the host's buffer to be replicated locally.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache for subsequent calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4337
    /// Creates a fresh in-memory buffer for composing a commit message on a
    /// local repository, tagging it with the "Git Commit" language when a
    /// registry is available, and caches it on the repository.
    fn open_local_commit_buffer(
        language_registry: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        cx.spawn(async move |repository, cx| {
            let git_commit_language = match language_registry {
                Some(language_registry) => {
                    Some(language_registry.language_for_name("Git Commit").await?)
                }
                None => None,
            };
            let buffer = buffer_store
                .update(cx, |buffer_store, cx| {
                    buffer_store.create_buffer(git_commit_language, false, cx)
                })
                .await?;

            // Cache the buffer so `open_commit_buffer` reuses it next time.
            repository.update(cx, |repository, _| {
                repository.commit_message_buffer = Some(buffer.clone());
            })?;
            Ok(buffer)
        })
    }
4362
    /// Restores `paths` to their contents at `commit` (`git checkout <commit>
    /// -- <paths>`), tracking the operation as a pending revert so the UI can
    /// reflect it optimistically.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote repository: proxy the request to the host.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4417
    /// Resets the repository to `commit` with the given mode (`git reset
    /// --soft|--mixed <commit>`), either locally or via the collab host.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            // Only soft/mixed are representable over the wire.
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4451
    /// Loads the details (sha, message, author, timestamp) of a single
    /// commit, locally or via the collab host.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4479
    /// Loads the full diff of a commit (old/new text per file), locally or
    /// via the collab host.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    // Deserialize per-file payloads; a bad path fails the
                    // whole request.
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4515
    /// Loads the full (unpaginated) commit history touching `path`.
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4522
    /// Loads a page of the commit history touching `path`, skipping the
    /// first `skip` commits and returning at most `limit` entries (all
    /// remaining when `limit` is `None`).
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4564
4565 pub fn get_graph_data(
4566 &self,
4567 log_source: LogSource,
4568 log_order: LogOrder,
4569 ) -> Option<&InitialGitGraphData> {
4570 self.initial_graph_data.get(&(log_source, log_order))
4571 }
4572
    /// Returns the slice of graph commit data covering `range` for the given
    /// log source/order, kicking off a background fetch the first time a
    /// particular (source, order) combination is requested.
    ///
    /// The requested `range` is clamped to the data loaded so far, so
    /// callers may ask for rows that have not streamed in yet.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // Stream commits from the backend; results are appended to
                // this entry by `local_git_graph_data`.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record any fetch error on the entry so the UI can show it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the range to what has been loaded so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4642
    /// Streams initial graph commit data from the local git backend into the
    /// repository's `initial_graph_data` entry for (`log_source`,
    /// `log_order`), emitting `GitGraphEvent::CountUpdated` as rows arrive.
    ///
    /// Returns once the backend finishes (or the repository is dropped),
    /// propagating any backend error as a `SharedString`.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        // The backend pushes batches of commit data through this channel
        // from a background task.
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches on the foreground, appending to the entry and
        // keeping the oid -> index lookup in sync.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                // The entry is created before this task starts; a vacant
                // entry here indicates a lifecycle bug.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4697
4698 pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
4699 if !self.commit_data.contains_key(&sha) {
4700 match &self.graph_commit_data_handler {
4701 GraphCommitHandlerState::Open(handler) => {
4702 if handler.commit_data_request.try_send(sha).is_ok() {
4703 let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
4704 debug_assert!(old_value.is_none(), "We should never overwrite commit data");
4705 }
4706 }
4707 GraphCommitHandlerState::Closed => {
4708 self.open_graph_commit_data_handler(cx);
4709 }
4710 GraphCommitHandlerState::Starting => {}
4711 }
4712 }
4713
4714 self.commit_data
4715 .get(&sha)
4716 .unwrap_or(&CommitDataState::Loading)
4717 }
4718
    /// Starts the background commit-data reader used by `fetch_commit_data`.
    ///
    /// Spawns two tasks: a background task that reads commit data for
    /// requested shas from the local git backend, and a foreground task that
    /// stores results into `commit_data` and notifies observers. The
    /// background task shuts itself down after ten seconds without requests,
    /// which causes the foreground task to mark the handler `Closed` so it
    /// can be reopened on demand.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Results are bounded to apply backpressure on the reader; requests
        // are unbounded so callers never block.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // The result channel closed (reader timed out or failed): mark
            // the handler closed so the next request reopens it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            // Only local repositories have a backend to read from.
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: shut the reader down after 10s without a
                // request; it will be reopened lazily.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // A single bad sha shouldn't kill the reader.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel terminates the foreground task's
            // receive loop, which then flips the handler state to Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4809
    /// Returns the project's `BufferStore`, unless the parent `GitStore`
    /// has been dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4813
    /// Starts saving every open, dirty buffer corresponding to the given
    /// repo paths, returning one save task per buffer.
    ///
    /// Buffers whose backing file no longer exists on disk are skipped, as
    /// are paths with no open buffer.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    // Only save buffers that exist on disk and have unsaved
                    // edits; saving a deleted file would recreate it.
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
4840
    /// Stages the given paths (`git add`), saving any dirty buffers first.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4848
    /// Unstages the given paths (`git reset`), saving any dirty buffers first.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4856
    /// Stages or unstages `entries`, saving dirty buffers first and applying
    /// optimistic hunk updates to any open uncommitted diffs.
    ///
    /// Before the index is written, each affected buffer's uncommitted diff
    /// is optimistically marked fully (un)staged and its
    /// `hunk_staging_operation_count` incremented; after the write, the
    /// recorded counts are committed on success or the pending hunks are
    /// cleared on failure. The job is keyed by `GitJobKey::WriteIndex` so
    /// writes to the same paths can be coalesced.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Persist buffer contents before touching the index so the
                // staged state matches what's on disk.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically toggle all hunks in each open
                            // uncommitted diff, remembering each diff's
                            // operation count so we can reconcile afterwards.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write, locally or on
                            // the collab host.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic updates: commit the
                            // recorded counts on success, or roll back the
                            // pending hunks on failure.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5041
5042 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5043 let snapshot = self.snapshot.clone();
5044 let pending_ops = self.pending_ops.clone();
5045 let to_stage = cx.background_spawn(async move {
5046 snapshot
5047 .status()
5048 .filter_map(|entry| {
5049 if let Some(ops) =
5050 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5051 {
5052 if ops.staging() || ops.staged() {
5053 None
5054 } else {
5055 Some(entry.repo_path)
5056 }
5057 } else if entry.status.staging().is_fully_staged() {
5058 None
5059 } else {
5060 Some(entry.repo_path)
5061 }
5062 })
5063 .collect()
5064 });
5065
5066 cx.spawn(async move |this, cx| {
5067 let to_stage = to_stage.await;
5068 this.update(cx, |this, cx| {
5069 this.stage_or_unstage_entries(true, to_stage, cx)
5070 })?
5071 .await
5072 })
5073 }
5074
5075 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5076 let snapshot = self.snapshot.clone();
5077 let pending_ops = self.pending_ops.clone();
5078 let to_unstage = cx.background_spawn(async move {
5079 snapshot
5080 .status()
5081 .filter_map(|entry| {
5082 if let Some(ops) =
5083 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5084 {
5085 if !ops.staging() && !ops.staged() {
5086 None
5087 } else {
5088 Some(entry.repo_path)
5089 }
5090 } else if entry.status.staging().is_fully_unstaged() {
5091 None
5092 } else {
5093 Some(entry.repo_path)
5094 }
5095 })
5096 .collect()
5097 });
5098
5099 cx.spawn(async move |this, cx| {
5100 let to_unstage = to_unstage.await;
5101 this.update(cx, |this, cx| {
5102 this.stage_or_unstage_entries(false, to_unstage, cx)
5103 })?
5104 .await
5105 })
5106 }
5107
    /// Stashes every path present in the cached status.
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5113
    /// Stashes the given paths (`git stash push -- <paths>`), locally or via
    /// the collab host.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5150
    /// Pops the stash entry at `index` (or the latest entry when `None`),
    /// locally or via the collab host.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5184
    /// Applies the stash entry at `index` (or the latest entry when `None`)
    /// without dropping it, locally or via the collab host.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5218
5219 pub fn stash_drop(
5220 &mut self,
5221 index: Option<usize>,
5222 cx: &mut Context<Self>,
5223 ) -> oneshot::Receiver<anyhow::Result<()>> {
5224 let id = self.id;
5225 let updates_tx = self
5226 .git_store()
5227 .and_then(|git_store| match &git_store.read(cx).state {
5228 GitStoreState::Local { downstream, .. } => downstream
5229 .as_ref()
5230 .map(|downstream| downstream.updates_tx.clone()),
5231 _ => None,
5232 });
5233 let this = cx.weak_entity();
5234 self.send_job(None, move |git_repo, mut cx| async move {
5235 match git_repo {
5236 RepositoryState::Local(LocalRepositoryState {
5237 backend,
5238 environment,
5239 ..
5240 }) => {
5241 // TODO would be nice to not have to do this manually
5242 let result = backend.stash_drop(index, environment).await;
5243 if result.is_ok()
5244 && let Ok(stash_entries) = backend.stash_entries().await
5245 {
5246 let snapshot = this.update(&mut cx, |this, cx| {
5247 this.snapshot.stash_entries = stash_entries;
5248 cx.emit(RepositoryEvent::StashEntriesChanged);
5249 this.snapshot.clone()
5250 })?;
5251 if let Some(updates_tx) = updates_tx {
5252 updates_tx
5253 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5254 .ok();
5255 }
5256 }
5257
5258 result
5259 }
5260 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5261 client
5262 .request(proto::StashDrop {
5263 project_id: project_id.0,
5264 repository_id: id.to_proto(),
5265 stash_index: index.map(|i| i as u64),
5266 })
5267 .await
5268 .context("sending stash pop request")?;
5269 Ok(())
5270 }
5271 }
5272 })
5273 }
5274
    /// Runs the given git hook (e.g. pre-commit), locally or via the collab
    /// host, surfacing a "git hook <name>" status while it executes.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5301
    /// Creates a commit with the given message and options, after first
    /// running the pre-commit hook.
    ///
    /// The hook is enqueued as its own job before the commit job; since the
    /// job queue is serialized, the commit job awaits the hook's result at
    /// the top and aborts if the hook fails. For remote repositories, the
    /// askpass delegate is registered under a fresh id so host-side password
    /// prompts can be routed back to this client.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Enqueue the pre-commit hook ahead of the commit job.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate when the request finishes,
                    // regardless of outcome.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5356
    /// Runs `git fetch` with the given options.
    ///
    /// Locally the backend performs the fetch directly with `askpass` for
    /// credential prompts. For remote repositories the delegate is registered
    /// under a fresh `askpass_id` — presumably so the host can route credential
    /// requests back to this client — and removed when the request finishes.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate even on error paths.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5398
5399 pub fn push(
5400 &mut self,
5401 branch: SharedString,
5402 remote_branch: SharedString,
5403 remote: SharedString,
5404 options: Option<PushOptions>,
5405 askpass: AskPassDelegate,
5406 cx: &mut Context<Self>,
5407 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5408 let askpass_delegates = self.askpass_delegates.clone();
5409 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5410 let id = self.id;
5411
5412 let args = options
5413 .map(|option| match option {
5414 PushOptions::SetUpstream => " --set-upstream",
5415 PushOptions::Force => " --force-with-lease",
5416 })
5417 .unwrap_or("");
5418
5419 let updates_tx = self
5420 .git_store()
5421 .and_then(|git_store| match &git_store.read(cx).state {
5422 GitStoreState::Local { downstream, .. } => downstream
5423 .as_ref()
5424 .map(|downstream| downstream.updates_tx.clone()),
5425 _ => None,
5426 });
5427
5428 let this = cx.weak_entity();
5429 self.send_job(
5430 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5431 move |git_repo, mut cx| async move {
5432 match git_repo {
5433 RepositoryState::Local(LocalRepositoryState {
5434 backend,
5435 environment,
5436 ..
5437 }) => {
5438 let result = backend
5439 .push(
5440 branch.to_string(),
5441 remote_branch.to_string(),
5442 remote.to_string(),
5443 options,
5444 askpass,
5445 environment.clone(),
5446 cx.clone(),
5447 )
5448 .await;
5449 // TODO would be nice to not have to do this manually
5450 if result.is_ok() {
5451 let branches = backend.branches().await?;
5452 let branch = branches.into_iter().find(|branch| branch.is_head);
5453 log::info!("head branch after scan is {branch:?}");
5454 let snapshot = this.update(&mut cx, |this, cx| {
5455 this.snapshot.branch = branch;
5456 cx.emit(RepositoryEvent::BranchChanged);
5457 this.snapshot.clone()
5458 })?;
5459 if let Some(updates_tx) = updates_tx {
5460 updates_tx
5461 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5462 .ok();
5463 }
5464 }
5465 result
5466 }
5467 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5468 askpass_delegates.lock().insert(askpass_id, askpass);
5469 let _defer = util::defer(|| {
5470 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5471 debug_assert!(askpass_delegate.is_some());
5472 });
5473 let response = client
5474 .request(proto::Push {
5475 project_id: project_id.0,
5476 repository_id: id.to_proto(),
5477 askpass_id,
5478 branch_name: branch.to_string(),
5479 remote_branch_name: remote_branch.to_string(),
5480 remote_name: remote.to_string(),
5481 options: options.map(|options| match options {
5482 PushOptions::Force => proto::push::PushOptions::Force,
5483 PushOptions::SetUpstream => {
5484 proto::push::PushOptions::SetUpstream
5485 }
5486 }
5487 as i32),
5488 })
5489 .await?;
5490
5491 Ok(RemoteCommandOutput {
5492 stdout: response.stdout,
5493 stderr: response.stderr,
5494 })
5495 }
5496 }
5497 },
5498 )
5499 }
5500
    /// Runs `git pull` (optionally `--rebase`) from `remote`, restricted to
    /// `branch` when one is given.
    ///
    /// For remote repositories the askpass delegate is registered under a
    /// fresh id for the duration of the request and removed when it completes.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build the human-readable status label, mirroring the equivalent
        // git command line.
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate even on error paths.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5565
    /// Queues a job that writes `content` as the index (staged) text for
    /// `path`. (When `content` is `None` the backend decides the semantics —
    /// presumably clearing/resetting the index entry; confirm against
    /// `set_index_text`.)
    ///
    /// The job is keyed on `GitJobKey::WriteIndex(path)`, so if a newer write
    /// for the same path is already queued, the worker loop drops this one in
    /// its favor. After a successful write, `hunk_staging_operation_count` (if
    /// provided) is recorded on the buffer's diff state so later diff updates
    /// can tell which staging operations this write already reflects.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit from the working copy;
                        // treat a missing file or a metadata error as non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    // Best-effort: if the buffer or its diff state no longer
                    // exists, the closure returns None and nothing is recorded.
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5644
5645 pub fn create_remote(
5646 &mut self,
5647 remote_name: String,
5648 remote_url: String,
5649 ) -> oneshot::Receiver<Result<()>> {
5650 let id = self.id;
5651 self.send_job(
5652 Some(format!("git remote add {remote_name} {remote_url}").into()),
5653 move |repo, _cx| async move {
5654 match repo {
5655 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5656 backend.create_remote(remote_name, remote_url).await
5657 }
5658 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5659 client
5660 .request(proto::GitCreateRemote {
5661 project_id: project_id.0,
5662 repository_id: id.to_proto(),
5663 remote_name,
5664 remote_url,
5665 })
5666 .await?;
5667
5668 Ok(())
5669 }
5670 }
5671 },
5672 )
5673 }
5674
5675 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5676 let id = self.id;
5677 self.send_job(
5678 Some(format!("git remove remote {remote_name}").into()),
5679 move |repo, _cx| async move {
5680 match repo {
5681 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5682 backend.remove_remote(remote_name).await
5683 }
5684 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5685 client
5686 .request(proto::GitRemoveRemote {
5687 project_id: project_id.0,
5688 repository_id: id.to_proto(),
5689 remote_name,
5690 })
5691 .await?;
5692
5693 Ok(())
5694 }
5695 }
5696 },
5697 )
5698 }
5699
5700 pub fn get_remotes(
5701 &mut self,
5702 branch_name: Option<String>,
5703 is_push: bool,
5704 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5705 let id = self.id;
5706 self.send_job(None, move |repo, _cx| async move {
5707 match repo {
5708 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5709 let remote = if let Some(branch_name) = branch_name {
5710 if is_push {
5711 backend.get_push_remote(branch_name).await?
5712 } else {
5713 backend.get_branch_remote(branch_name).await?
5714 }
5715 } else {
5716 None
5717 };
5718
5719 match remote {
5720 Some(remote) => Ok(vec![remote]),
5721 None => backend.get_all_remotes().await,
5722 }
5723 }
5724 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5725 let response = client
5726 .request(proto::GetRemotes {
5727 project_id: project_id.0,
5728 repository_id: id.to_proto(),
5729 branch_name,
5730 is_push,
5731 })
5732 .await?;
5733
5734 let remotes = response
5735 .remotes
5736 .into_iter()
5737 .map(|remotes| Remote {
5738 name: remotes.name.into(),
5739 })
5740 .collect();
5741
5742 Ok(remotes)
5743 }
5744 }
5745 })
5746 }
5747
5748 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5749 let id = self.id;
5750 self.send_job(None, move |repo, _| async move {
5751 match repo {
5752 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5753 backend.branches().await
5754 }
5755 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5756 let response = client
5757 .request(proto::GitGetBranches {
5758 project_id: project_id.0,
5759 repository_id: id.to_proto(),
5760 })
5761 .await?;
5762
5763 let branches = response
5764 .branches
5765 .into_iter()
5766 .map(|branch| proto_to_branch(&branch))
5767 .collect();
5768
5769 Ok(branches)
5770 }
5771 }
5772 })
5773 }
5774
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree's working directory.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree is detected by its working directory differing
        // from the original repository checkout's path.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5786
5787 pub fn path_for_new_linked_worktree(
5788 &self,
5789 branch_name: &str,
5790 worktree_directory_setting: &str,
5791 ) -> Result<PathBuf> {
5792 let original_repo = self.original_repo_abs_path.clone();
5793 let project_name = original_repo
5794 .file_name()
5795 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5796 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5797 Ok(directory.join(branch_name).join(project_name))
5798 }
5799
5800 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5801 let id = self.id;
5802 self.send_job(None, move |repo, _| async move {
5803 match repo {
5804 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5805 backend.worktrees().await
5806 }
5807 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5808 let response = client
5809 .request(proto::GitGetWorktrees {
5810 project_id: project_id.0,
5811 repository_id: id.to_proto(),
5812 })
5813 .await?;
5814
5815 let worktrees = response
5816 .worktrees
5817 .into_iter()
5818 .map(|worktree| proto_to_worktree(&worktree))
5819 .collect();
5820
5821 Ok(worktrees)
5822 }
5823 }
5824 })
5825 }
5826
5827 pub fn create_worktree(
5828 &mut self,
5829 branch_name: String,
5830 path: PathBuf,
5831 commit: Option<String>,
5832 ) -> oneshot::Receiver<Result<()>> {
5833 let id = self.id;
5834 self.send_job(
5835 Some(format!("git worktree add: {}", branch_name).into()),
5836 move |repo, _cx| async move {
5837 match repo {
5838 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5839 backend.create_worktree(branch_name, path, commit).await
5840 }
5841 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5842 client
5843 .request(proto::GitCreateWorktree {
5844 project_id: project_id.0,
5845 repository_id: id.to_proto(),
5846 name: branch_name,
5847 directory: path.to_string_lossy().to_string(),
5848 commit,
5849 })
5850 .await?;
5851
5852 Ok(())
5853 }
5854 }
5855 },
5856 )
5857 }
5858
5859 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5860 let id = self.id;
5861 self.send_job(
5862 Some(format!("git worktree remove: {}", path.display()).into()),
5863 move |repo, _cx| async move {
5864 match repo {
5865 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5866 backend.remove_worktree(path, force).await
5867 }
5868 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5869 client
5870 .request(proto::GitRemoveWorktree {
5871 project_id: project_id.0,
5872 repository_id: id.to_proto(),
5873 path: path.to_string_lossy().to_string(),
5874 force,
5875 })
5876 .await?;
5877
5878 Ok(())
5879 }
5880 }
5881 },
5882 )
5883 }
5884
5885 pub fn rename_worktree(
5886 &mut self,
5887 old_path: PathBuf,
5888 new_path: PathBuf,
5889 ) -> oneshot::Receiver<Result<()>> {
5890 let id = self.id;
5891 self.send_job(
5892 Some(format!("git worktree move: {}", old_path.display()).into()),
5893 move |repo, _cx| async move {
5894 match repo {
5895 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5896 backend.rename_worktree(old_path, new_path).await
5897 }
5898 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5899 client
5900 .request(proto::GitRenameWorktree {
5901 project_id: project_id.0,
5902 repository_id: id.to_proto(),
5903 old_path: old_path.to_string_lossy().to_string(),
5904 new_path: new_path.to_string_lossy().to_string(),
5905 })
5906 .await?;
5907
5908 Ok(())
5909 }
5910 }
5911 },
5912 )
5913 }
5914
5915 pub fn default_branch(
5916 &mut self,
5917 include_remote_name: bool,
5918 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5919 let id = self.id;
5920 self.send_job(None, move |repo, _| async move {
5921 match repo {
5922 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5923 backend.default_branch(include_remote_name).await
5924 }
5925 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5926 let response = client
5927 .request(proto::GetDefaultBranch {
5928 project_id: project_id.0,
5929 repository_id: id.to_proto(),
5930 })
5931 .await?;
5932
5933 anyhow::Ok(response.branch.map(SharedString::from))
5934 }
5935 }
5936 })
5937 }
5938
    /// Computes the set of changed paths for `diff_type` as a `TreeDiff`.
    ///
    /// Locally this delegates to the backend. For remote repositories the
    /// response entries are decoded from protobuf; entries with a missing or
    /// unparsable oid/path are logged and dropped rather than failing the
    /// whole diff.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Decode each wire entry; `log_err()?` inside `filter_map`
                    // skips (but logs) malformed entries instead of erroring.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
5998
5999 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6000 let id = self.id;
6001 self.send_job(None, move |repo, _cx| async move {
6002 match repo {
6003 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6004 backend.diff(diff_type).await
6005 }
6006 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6007 let (proto_diff_type, merge_base_ref) = match &diff_type {
6008 DiffType::HeadToIndex => {
6009 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6010 }
6011 DiffType::HeadToWorktree => {
6012 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6013 }
6014 DiffType::MergeBase { base_ref } => (
6015 proto::git_diff::DiffType::MergeBase.into(),
6016 Some(base_ref.to_string()),
6017 ),
6018 };
6019 let response = client
6020 .request(proto::GitDiff {
6021 project_id: project_id.0,
6022 repository_id: id.to_proto(),
6023 diff_type: proto_diff_type,
6024 merge_base_ref,
6025 })
6026 .await?;
6027
6028 Ok(response.diff)
6029 }
6030 }
6031 })
6032 }
6033
    /// Creates branch `branch_name`, optionally based on `base_branch`
    /// (status label suggests a `git switch -c`-style create-and-switch —
    /// confirm against the backend's `create_branch`).
    ///
    /// NOTE(review): the remote `proto::GitCreateBranch` request does not
    /// forward `base_branch`, so a remote create from a non-default base may
    /// be silently ignored — verify against the proto schema and host handler.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6064
6065 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6066 let id = self.id;
6067 self.send_job(
6068 Some(format!("git switch {branch_name}").into()),
6069 move |repo, _cx| async move {
6070 match repo {
6071 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6072 backend.change_branch(branch_name).await
6073 }
6074 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6075 client
6076 .request(proto::GitChangeBranch {
6077 project_id: project_id.0,
6078 repository_id: id.to_proto(),
6079 branch_name,
6080 })
6081 .await?;
6082
6083 Ok(())
6084 }
6085 }
6086 },
6087 )
6088 }
6089
6090 pub fn delete_branch(
6091 &mut self,
6092 is_remote: bool,
6093 branch_name: String,
6094 ) -> oneshot::Receiver<Result<()>> {
6095 let id = self.id;
6096 self.send_job(
6097 Some(
6098 format!(
6099 "git branch {} {}",
6100 if is_remote { "-dr" } else { "-d" },
6101 branch_name
6102 )
6103 .into(),
6104 ),
6105 move |repo, _cx| async move {
6106 match repo {
6107 RepositoryState::Local(state) => {
6108 state.backend.delete_branch(is_remote, branch_name).await
6109 }
6110 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6111 client
6112 .request(proto::GitDeleteBranch {
6113 project_id: project_id.0,
6114 repository_id: id.to_proto(),
6115 is_remote,
6116 branch_name,
6117 })
6118 .await?;
6119
6120 Ok(())
6121 }
6122 }
6123 },
6124 )
6125 }
6126
6127 pub fn rename_branch(
6128 &mut self,
6129 branch: String,
6130 new_name: String,
6131 ) -> oneshot::Receiver<Result<()>> {
6132 let id = self.id;
6133 self.send_job(
6134 Some(format!("git branch -m {branch} {new_name}").into()),
6135 move |repo, _cx| async move {
6136 match repo {
6137 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6138 backend.rename_branch(branch, new_name).await
6139 }
6140 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6141 client
6142 .request(proto::GitRenameBranch {
6143 project_id: project_id.0,
6144 repository_id: id.to_proto(),
6145 branch,
6146 new_name,
6147 })
6148 .await?;
6149
6150 Ok(())
6151 }
6152 }
6153 },
6154 )
6155 }
6156
6157 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6158 let id = self.id;
6159 self.send_job(None, move |repo, _cx| async move {
6160 match repo {
6161 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6162 backend.check_for_pushed_commit().await
6163 }
6164 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6165 let response = client
6166 .request(proto::CheckForPushedCommits {
6167 project_id: project_id.0,
6168 repository_id: id.to_proto(),
6169 })
6170 .await?;
6171
6172 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6173
6174 Ok(branches)
6175 }
6176 }
6177 })
6178 }
6179
6180 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6181 self.send_job(None, |repo, _cx| async move {
6182 match repo {
6183 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6184 backend.checkpoint().await
6185 }
6186 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6187 }
6188 })
6189 }
6190
6191 pub fn restore_checkpoint(
6192 &mut self,
6193 checkpoint: GitRepositoryCheckpoint,
6194 ) -> oneshot::Receiver<Result<()>> {
6195 self.send_job(None, move |repo, _cx| async move {
6196 match repo {
6197 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6198 backend.restore_checkpoint(checkpoint).await
6199 }
6200 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6201 }
6202 })
6203 }
6204
    /// Applies an incremental `UpdateRepository` message from the upstream
    /// (host) project to this downstream snapshot, emitting change events
    /// only for the parts that actually differ.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch / head commit: compare before overwriting so we only emit
        // BranchChanged when something actually changed.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries: malformed wire entries are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Statuses are updated as a batch of removals + insertions on the
        // path-keyed sum tree; unparsable paths/statuses are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // The scan id only advances once the final message of a batch arrives.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6288
6289 pub fn compare_checkpoints(
6290 &mut self,
6291 left: GitRepositoryCheckpoint,
6292 right: GitRepositoryCheckpoint,
6293 ) -> oneshot::Receiver<Result<bool>> {
6294 self.send_job(None, move |repo, _cx| async move {
6295 match repo {
6296 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6297 backend.compare_checkpoints(left, right).await
6298 }
6299 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6300 }
6301 })
6302 }
6303
6304 pub fn diff_checkpoints(
6305 &mut self,
6306 base_checkpoint: GitRepositoryCheckpoint,
6307 target_checkpoint: GitRepositoryCheckpoint,
6308 ) -> oneshot::Receiver<Result<String>> {
6309 self.send_job(None, move |repo, _cx| async move {
6310 match repo {
6311 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6312 backend
6313 .diff_checkpoints(base_checkpoint, target_checkpoint)
6314 .await
6315 }
6316 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6317 }
6318 })
6319 }
6320
    /// Prunes pending-op entries, keeping only ops that are still running.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree, dropping any path whose ops have all finished.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): this emits the *previous* ops (the event fires
            // before the assignment below) — confirm whether the event is
            // meant to carry the pruned `updated` set instead.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6346
    /// Queues a git status re-scan for a local repository, keyed on
    /// `ReloadGitState` so a newer queued scan supersedes this one (see the
    /// worker loop's key-based skip). On success the fresh snapshot is
    /// forwarded downstream via `updates_tx`, when provided.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // If the repository entity is gone, there's nothing to update.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // NOTE(review): the result of this `update` is discarded —
                // confirm a dropped-entity error here is intentionally ignored.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6378
    /// Spawns the job-processing loop for a local repository and returns the
    /// sender used to enqueue jobs onto it.
    ///
    /// Jobs run strictly one at a time, in FIFO order, with one exception:
    /// when a dequeued job has a key and a later queued job shares that key,
    /// the older job is skipped entirely (its closure is dropped unrun, so
    /// any receiver for it resolves as canceled) — newer keyed work supersedes
    /// older work.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // processing any jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so key-deduplication below
                // can see the most recent job for each key.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        // A newer job with the same key is queued; drop this one.
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6424
    /// Spawns the job-processing loop for a remote (collab) repository and
    /// returns the sender used to enqueue jobs onto it.
    ///
    /// Same scheduling as `spawn_local_git_worker` — FIFO with newer keyed
    /// jobs superseding older ones — but without backend initialization or
    /// hosting-provider registration. NOTE(review): this loop duplicates the
    /// local worker's loop; consider extracting a shared helper.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so key-deduplication below
                // can see the most recent job for each key.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        // A newer job with the same key is queued; drop this one.
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6460
6461 fn load_staged_text(
6462 &mut self,
6463 buffer_id: BufferId,
6464 repo_path: RepoPath,
6465 cx: &App,
6466 ) -> Task<Result<Option<String>>> {
6467 let rx = self.send_job(None, move |state, _| async move {
6468 match state {
6469 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6470 anyhow::Ok(backend.load_index_text(repo_path).await)
6471 }
6472 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6473 let response = client
6474 .request(proto::OpenUnstagedDiff {
6475 project_id: project_id.to_proto(),
6476 buffer_id: buffer_id.to_proto(),
6477 })
6478 .await?;
6479 Ok(response.staged_text)
6480 }
6481 }
6482 });
6483 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6484 }
6485
    /// Loads both the committed (HEAD) and staged (index) texts for
    /// `repo_path`, collapsing them into a single `DiffBasesChange`:
    /// `SetBoth` when index and HEAD agree, `SetEach` otherwise.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // If the index matches HEAD, one text serves as both bases.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The host performed the same index-vs-HEAD comparison and
                    // encodes the outcome in `mode`.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6531
6532 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6533 let repository_id = self.snapshot.id;
6534 let rx = self.send_job(None, move |state, _| async move {
6535 match state {
6536 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6537 backend.load_blob_content(oid).await
6538 }
6539 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6540 let response = client
6541 .request(proto::GetBlobContent {
6542 project_id: project_id.to_proto(),
6543 repository_id: repository_id.0,
6544 oid: oid.to_string(),
6545 })
6546 .await?;
6547 Ok(response.content)
6548 }
6549 }
6550 });
6551 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6552 }
6553
    /// Schedules a refresh of git statuses, diff stats, and stash entries for
    /// the given `paths`, emitting change events and forwarding the updated
    /// snapshot downstream when anything actually changed.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        // Accumulate paths; the keyed job below consumes the entire backlog,
        // so repeated calls coalesce into a single refresh.
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        // Keyed with `RefreshStatuses`: the worker skips a keyed job when a
        // newer job with the same key is already queued (debouncing).
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                // Incremental status refreshes only run against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        // Diff stats need a HEAD commit to diff against; on an
                        // unborn branch, substitute an empty result instead.
                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Upsert entries whose status or diff stat differs from
                        // the previous snapshot; a single forward-seeking
                        // cursor suffices because the entries arrive in order.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths we were asked about that no longer appear in
                        // the status output have become clean: remove their
                        // old entries (if any) from the snapshot.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                // Apply results on the entity, emitting events only when the
                // corresponding piece of state actually changed.
                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6666
    /// Returns info about a currently running git command and when it
    /// started, or `None` when no job is active. When multiple jobs are
    /// active, an arbitrary one is returned (whichever the map yields first).
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }
6671
    /// Enqueues a no-op job and returns a receiver that resolves once every
    /// previously queued job has finished — i.e. a completion barrier.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
6675
    /// Runs `f` while tracking it as a pending git operation on each of
    /// `paths`: an op with `git_status` is recorded in the `Running` state up
    /// front, then marked `Finished`, `Skipped` (on channel cancellation), or
    /// `Error` once `f` resolves. `f`'s error, if any, is propagated.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                // A dropped oneshot (`Canceled`) means the job never ran to
                // completion; record `Skipped` but don't surface an error.
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Re-resolve each op by id and record its final status.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6715
6716 fn new_pending_ops_for_paths(
6717 &mut self,
6718 paths: Vec<RepoPath>,
6719 git_status: pending_op::GitStatus,
6720 ) -> Vec<(PendingOpId, RepoPath)> {
6721 let mut edits = Vec::with_capacity(paths.len());
6722 let mut ids = Vec::with_capacity(paths.len());
6723 for path in paths {
6724 let mut ops = self
6725 .pending_ops
6726 .get(&PathKey(path.as_ref().clone()), ())
6727 .cloned()
6728 .unwrap_or_else(|| PendingOps::new(&path));
6729 let id = ops.max_id() + 1;
6730 ops.ops.push(PendingOp {
6731 id,
6732 git_status,
6733 job_status: pending_op::JobStatus::Running,
6734 });
6735 edits.push(sum_tree::Edit::Insert(ops));
6736 ids.push((id, path));
6737 }
6738 self.pending_ops.edit(edits, ());
6739 ids
6740 }
6741 pub fn default_remote_url(&self) -> Option<String> {
6742 self.remote_upstream_url
6743 .clone()
6744 .or(self.remote_origin_url.clone())
6745 }
6746}
6747
6748/// If `path` is a git linked worktree checkout, resolves it to the main
6749/// repository's working directory path. Returns `None` if `path` is a normal
6750/// repository, not a git repo, or if resolution fails.
6751///
6752/// Resolution works by:
6753/// 1. Reading the `.git` file to get the `gitdir:` pointer
6754/// 2. Following that to the worktree-specific git directory
6755/// 3. Reading the `commondir` file to find the shared `.git` directory
6756/// 4. Deriving the main repo's working directory from the common dir
6757pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
6758 let dot_git = path.join(".git");
6759 let metadata = fs.metadata(&dot_git).await.ok()??;
6760 if metadata.is_dir {
6761 return None; // Normal repo, not a linked worktree
6762 }
6763 // It's a .git file — parse the gitdir: pointer
6764 let content = fs.load(&dot_git).await.ok()?;
6765 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
6766 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
6767 // Read commondir to find the main .git directory
6768 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
6769 let common_dir = fs
6770 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
6771 .await
6772 .ok()?;
6773 Some(git::repository::original_repo_path_from_common_dir(
6774 &common_dir,
6775 ))
6776}
6777
6778/// Validates that the resolved worktree directory is acceptable:
6779/// - The setting must not be an absolute path.
6780/// - The resolved path must be either a subdirectory of the working
6781/// directory or a subdirectory of its parent (i.e., a sibling).
6782///
6783/// Returns `Ok(resolved_path)` or an error with a user-facing message.
6784pub fn worktrees_directory_for_repo(
6785 original_repo_abs_path: &Path,
6786 worktree_directory_setting: &str,
6787) -> Result<PathBuf> {
6788 // Check the original setting before trimming, since a path like "///"
6789 // is absolute but becomes "" after stripping trailing separators.
6790 // Also check for leading `/` or `\` explicitly, because on Windows
6791 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
6792 // would slip through even though it's clearly not a relative path.
6793 if Path::new(worktree_directory_setting).is_absolute()
6794 || worktree_directory_setting.starts_with('/')
6795 || worktree_directory_setting.starts_with('\\')
6796 {
6797 anyhow::bail!(
6798 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
6799 );
6800 }
6801
6802 if worktree_directory_setting.is_empty() {
6803 anyhow::bail!("git.worktree_directory must not be empty");
6804 }
6805
6806 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
6807 if trimmed == ".." {
6808 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
6809 }
6810
6811 let joined = original_repo_abs_path.join(trimmed);
6812 let resolved = util::normalize_path(&joined);
6813 let resolved = if resolved.starts_with(original_repo_abs_path) {
6814 resolved
6815 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
6816 resolved.join(repo_dir_name)
6817 } else {
6818 resolved
6819 };
6820
6821 let parent = original_repo_abs_path
6822 .parent()
6823 .unwrap_or(original_repo_abs_path);
6824
6825 if !resolved.starts_with(parent) {
6826 anyhow::bail!(
6827 "git.worktree_directory resolved to {resolved:?}, which is outside \
6828 the project root and its parent directory. It must resolve to a \
6829 subdirectory of {original_repo_abs_path:?} or a sibling of it."
6830 );
6831 }
6832
6833 Ok(resolved)
6834}
6835
6836/// Returns a short name for a linked worktree suitable for UI display
6837///
6838/// Uses the main worktree path to come up with a short name that disambiguates
6839/// the linked worktree from the main worktree.
6840pub fn linked_worktree_short_name(
6841 main_worktree_path: &Path,
6842 linked_worktree_path: &Path,
6843) -> Option<SharedString> {
6844 if main_worktree_path == linked_worktree_path {
6845 return None;
6846 }
6847
6848 let project_name = main_worktree_path.file_name()?.to_str()?;
6849 let directory_name = linked_worktree_path.file_name()?.to_str()?;
6850 let name = if directory_name != project_name {
6851 directory_name.to_string()
6852 } else {
6853 linked_worktree_path
6854 .parent()?
6855 .file_name()?
6856 .to_str()?
6857 .to_string()
6858 };
6859 Some(name.into())
6860}
6861
6862fn get_permalink_in_rust_registry_src(
6863 provider_registry: Arc<GitHostingProviderRegistry>,
6864 path: PathBuf,
6865 selection: Range<u32>,
6866) -> Result<url::Url> {
6867 #[derive(Deserialize)]
6868 struct CargoVcsGit {
6869 sha1: String,
6870 }
6871
6872 #[derive(Deserialize)]
6873 struct CargoVcsInfo {
6874 git: CargoVcsGit,
6875 path_in_vcs: String,
6876 }
6877
6878 #[derive(Deserialize)]
6879 struct CargoPackage {
6880 repository: String,
6881 }
6882
6883 #[derive(Deserialize)]
6884 struct CargoToml {
6885 package: CargoPackage,
6886 }
6887
6888 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
6889 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
6890 Some((dir, json))
6891 }) else {
6892 bail!("No .cargo_vcs_info.json found in parent directories")
6893 };
6894 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
6895 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
6896 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
6897 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
6898 .context("parsing package.repository field of manifest")?;
6899 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
6900 let permalink = provider.build_permalink(
6901 remote,
6902 BuildPermalinkParams::new(
6903 &cargo_vcs_info.git.sha1,
6904 &RepoPath::from_rel_path(
6905 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6906 ),
6907 Some(selection),
6908 ),
6909 );
6910 Ok(permalink)
6911}
6912
6913fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6914 let Some(blame) = blame else {
6915 return proto::BlameBufferResponse {
6916 blame_response: None,
6917 };
6918 };
6919
6920 let entries = blame
6921 .entries
6922 .into_iter()
6923 .map(|entry| proto::BlameEntry {
6924 sha: entry.sha.as_bytes().into(),
6925 start_line: entry.range.start,
6926 end_line: entry.range.end,
6927 original_line_number: entry.original_line_number,
6928 author: entry.author,
6929 author_mail: entry.author_mail,
6930 author_time: entry.author_time,
6931 author_tz: entry.author_tz,
6932 committer: entry.committer_name,
6933 committer_mail: entry.committer_email,
6934 committer_time: entry.committer_time,
6935 committer_tz: entry.committer_tz,
6936 summary: entry.summary,
6937 previous: entry.previous,
6938 filename: entry.filename,
6939 })
6940 .collect::<Vec<_>>();
6941
6942 let messages = blame
6943 .messages
6944 .into_iter()
6945 .map(|(oid, message)| proto::CommitMessage {
6946 oid: oid.as_bytes().into(),
6947 message,
6948 })
6949 .collect::<Vec<_>>();
6950
6951 proto::BlameBufferResponse {
6952 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6953 }
6954}
6955
6956fn deserialize_blame_buffer_response(
6957 response: proto::BlameBufferResponse,
6958) -> Option<git::blame::Blame> {
6959 let response = response.blame_response?;
6960 let entries = response
6961 .entries
6962 .into_iter()
6963 .filter_map(|entry| {
6964 Some(git::blame::BlameEntry {
6965 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6966 range: entry.start_line..entry.end_line,
6967 original_line_number: entry.original_line_number,
6968 committer_name: entry.committer,
6969 committer_time: entry.committer_time,
6970 committer_tz: entry.committer_tz,
6971 committer_email: entry.committer_mail,
6972 author: entry.author,
6973 author_mail: entry.author_mail,
6974 author_time: entry.author_time,
6975 author_tz: entry.author_tz,
6976 summary: entry.summary,
6977 previous: entry.previous,
6978 filename: entry.filename,
6979 })
6980 })
6981 .collect::<Vec<_>>();
6982
6983 let messages = response
6984 .messages
6985 .into_iter()
6986 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6987 .collect::<HashMap<_, _>>();
6988
6989 Some(Blame { entries, messages })
6990}
6991
6992fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6993 proto::Branch {
6994 is_head: branch.is_head,
6995 ref_name: branch.ref_name.to_string(),
6996 unix_timestamp: branch
6997 .most_recent_commit
6998 .as_ref()
6999 .map(|commit| commit.commit_timestamp as u64),
7000 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7001 ref_name: upstream.ref_name.to_string(),
7002 tracking: upstream
7003 .tracking
7004 .status()
7005 .map(|upstream| proto::UpstreamTracking {
7006 ahead: upstream.ahead as u64,
7007 behind: upstream.behind as u64,
7008 }),
7009 }),
7010 most_recent_commit: branch
7011 .most_recent_commit
7012 .as_ref()
7013 .map(|commit| proto::CommitSummary {
7014 sha: commit.sha.to_string(),
7015 subject: commit.subject.to_string(),
7016 commit_timestamp: commit.commit_timestamp,
7017 author_name: commit.author_name.to_string(),
7018 }),
7019 }
7020}
7021
7022fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7023 proto::Worktree {
7024 path: worktree.path.to_string_lossy().to_string(),
7025 ref_name: worktree
7026 .ref_name
7027 .as_ref()
7028 .map(|s| s.to_string())
7029 .unwrap_or_default(),
7030 sha: worktree.sha.to_string(),
7031 }
7032}
7033
7034fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7035 git::repository::Worktree {
7036 path: PathBuf::from(proto.path.clone()),
7037 ref_name: Some(SharedString::from(&proto.ref_name)),
7038 sha: proto.sha.clone().into(),
7039 }
7040}
7041
7042fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7043 git::repository::Branch {
7044 is_head: proto.is_head,
7045 ref_name: proto.ref_name.clone().into(),
7046 upstream: proto
7047 .upstream
7048 .as_ref()
7049 .map(|upstream| git::repository::Upstream {
7050 ref_name: upstream.ref_name.to_string().into(),
7051 tracking: upstream
7052 .tracking
7053 .as_ref()
7054 .map(|tracking| {
7055 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7056 ahead: tracking.ahead as u32,
7057 behind: tracking.behind as u32,
7058 })
7059 })
7060 .unwrap_or(git::repository::UpstreamTracking::Gone),
7061 }),
7062 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7063 git::repository::CommitSummary {
7064 sha: commit.sha.to_string().into(),
7065 subject: commit.subject.to_string().into(),
7066 commit_timestamp: commit.commit_timestamp,
7067 author_name: commit.author_name.to_string().into(),
7068 has_parent: true,
7069 }
7070 }),
7071 }
7072}
7073
7074fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7075 proto::GitCommitDetails {
7076 sha: commit.sha.to_string(),
7077 message: commit.message.to_string(),
7078 commit_timestamp: commit.commit_timestamp,
7079 author_email: commit.author_email.to_string(),
7080 author_name: commit.author_name.to_string(),
7081 }
7082}
7083
7084fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7085 CommitDetails {
7086 sha: proto.sha.clone().into(),
7087 message: proto.message.clone().into(),
7088 commit_timestamp: proto.commit_timestamp,
7089 author_email: proto.author_email.clone().into(),
7090 author_name: proto.author_name.clone().into(),
7091 }
7092}
7093
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        // A full snapshot supersedes any pending incremental status refresh.
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD's sha to full commit details; an unborn branch (no HEAD
    // sha) yields `None` rather than an error.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Phase 1 (background): branch list, head commit, git worktrees.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Keep only *linked* worktrees; the main checkout itself is excluded.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // Publish the cheap branch/remote/worktree state immediately, so the UI
    // can update before the slower status scan below completes.
    let snapshot = this.update(cx, |this, cx| {
        let branch_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if branch_changed {
            cx.emit(RepositoryEvent::BranchChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Phase 2 (background): full status scan, diff stats, stash entries.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                // Diff stats need a HEAD to diff against; substitute an empty
                // result on an unborn branch.
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    // "." requests status for the entire repository.
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Join statuses with their diff stats and collect conflicted paths for
    // the merge-state update below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Recompute merge/conflict state from the freshly scanned conflicts.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Apply file state and emit the deferred status/stash events.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7256
/// Decodes a [`FileStatus`] from its protobuf form.
///
/// When the structured `status` variant is present it takes precedence;
/// otherwise the legacy `simple_status` integer code is decoded instead.
/// Returns an error for unrecognized or out-of-place status codes.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Legacy path: no structured variant, only the simple integer code.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both sides of the merge with the same mapping, then
            // surface the first decoding error, if any.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Same pattern for tracked files: index and worktree statuses
            // are decoded together, errors propagated afterwards.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7342
7343fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7344 use proto::git_file_status::{Tracked, Unmerged, Variant};
7345
7346 let variant = match status {
7347 FileStatus::Untracked => Variant::Untracked(Default::default()),
7348 FileStatus::Ignored => Variant::Ignored(Default::default()),
7349 FileStatus::Unmerged(UnmergedStatus {
7350 first_head,
7351 second_head,
7352 }) => Variant::Unmerged(Unmerged {
7353 first_head: unmerged_status_to_proto(first_head),
7354 second_head: unmerged_status_to_proto(second_head),
7355 }),
7356 FileStatus::Tracked(TrackedStatus {
7357 index_status,
7358 worktree_status,
7359 }) => Variant::Tracked(Tracked {
7360 index_status: tracked_status_to_proto(index_status),
7361 worktree_status: tracked_status_to_proto(worktree_status),
7362 }),
7363 };
7364 proto::GitFileStatus {
7365 variant: Some(variant),
7366 }
7367}
7368
7369fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7370 match code {
7371 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7372 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7373 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7374 }
7375}
7376
7377fn tracked_status_to_proto(code: StatusCode) -> i32 {
7378 match code {
7379 StatusCode::Added => proto::GitStatus::Added as _,
7380 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7381 StatusCode::Modified => proto::GitStatus::Modified as _,
7382 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7383 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7384 StatusCode::Copied => proto::GitStatus::Copied as _,
7385 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7386 }
7387}