1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for git state across a project: tracks repositories,
/// per-buffer diff/conflict state, and which repository is active.
pub struct GitStore {
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All known repositories, keyed by id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// Worktree ids associated with each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff loads, shared so concurrent requests reuse one task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diff entities kept alive on behalf of remote peers, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Strong handles to a buffer's diffs, held while they are shared with a peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: weak handles to the buffer's diffs and conflict set,
/// cached base texts, and bookkeeping for diff recalculation.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs against specific commits, keyed by oid.
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders notified when the next conflict-marker update completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    /// Cached base texts for oid diffs.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed, and their new
/// contents.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index text changed.
    SetIndex(Option<String>),
    /// Only the HEAD text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}
155
/// Which kind of buffer diff is being loaded or cached.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer contents vs. the staged (index) text.
    Unstaged,
    /// Buffer contents vs. the committed (HEAD) text.
    Uncommitted,
    /// Buffer contents vs. the blob at a specific commit.
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store operates on the local filesystem or proxies to a remote
/// host, plus the per-mode sharing state.
enum GitStoreState {
    Local {
        /// Source of fresh `RepositoryId`s.
        next_repository_id: Arc<AtomicU64>,
        /// Present while the project is shared with downstream collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while re-shared with downstream collaborators.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// Messages streamed to downstream collaborators (see `GitStore::shared`).
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
181
/// Connection state used while streaming local repository updates downstream.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    /// Producer side of the update stream consumed by `_task`.
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    /// Background task that forwards updates to the client.
    _task: Task<Result<()>>,
}
188
/// A capture of per-repository checkpoints, keyed by each repository's working
/// directory; produced by `GitStore::checkpoint`.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// Git status for a single path within a repository, with an optional diff
/// stat (lines added/deleted).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summarizes this entry for the status sum-tree: the path as the ordering
    /// key plus the aggregated git status summary.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}
254
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed by their repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Identifier for a repository tracked by the `GitStore`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
265
/// Details of an in-progress merge: merge heads recorded per conflicted path,
/// and the merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}
271
/// Cache state for a commit's graph data: still loading, or loaded.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// Cloneable view of a repository's state: statuses, branch, head commit,
/// merge info, stash entries, remotes, and linked git worktrees.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Per-path statuses, ordered by path in a sum-tree.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    pub linked_worktrees: Arc<[GitWorktree]>,
}
298
299type JobId = u64;
300
/// Metadata about a running git job.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    /// When the job started running.
    pub start: Instant,
    /// Description of the job.
    pub message: SharedString,
}
306
/// Handle to a background task serving commit-data requests for the git
/// graph; requests are sent as commit oids over `commit_data_request`.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}
311
/// Lifecycle state of the graph commit-data handler task.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
317
/// Commit data backing the initial git-graph view, fetched in the background.
pub struct InitialGitGraphData {
    /// Background task that fetches the commit data.
    fetch_task: Task<()>,
    /// Error encountered while loading, if any.
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index of each commit within `commit_data`, by oid.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
324
/// Borrowed view of the currently loaded graph commits plus loading status.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// A single git repository tracked by the `GitStore`: its latest snapshot
/// plus job scheduling, askpass, and git-graph state.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Sender used to queue `GitJob`s for this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Jobs currently running, keyed by id.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Shared task resolving to the local or remote repository backend state.
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
350
/// `Repository` dereferences to its snapshot, so snapshot fields and methods
/// can be accessed directly on the repository.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
358
/// State needed to run git operations against a local repository.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    /// Backend used to execute git operations.
    pub backend: Arc<dyn GitRepository>,
    /// Shell environment resolved for the working directory.
    pub environment: Arc<HashMap<String, String>>,
}
365
impl LocalRepositoryState {
    /// Opens a local git repository backend for the given working directory.
    ///
    /// Resolves the directory's shell environment, locates a system `git`
    /// binary via that environment's `PATH` (falling back to the global
    /// `PATH`), then opens the repository at `dot_git_abs_path` and marks it
    /// trusted or not.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                // Fall back to an empty environment rather than failing to
                // open the repository.
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found via the directory's own PATH, then
                    // fall back to whatever is on the global PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
408
/// State needed to proxy git operations to a remote host.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
414
/// Where a repository's git operations execute: locally or on a remote host.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
420
/// Progress events for loading the git graph.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
427
/// Events emitted by a `Repository` as aspects of its state change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// Git-graph loading progress for a particular log source/order.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
437
/// Marker event signaling that a repository's set of jobs has been updated.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
440
/// Store-level events emitted by `GitStore`.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// Writing to the git index failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
452
// Event plumbing: repositories emit repository events and job updates; the
// store emits store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
456
/// A unit of git work queued on a repository's job channel.
pub struct GitJob {
    /// The work itself; receives the repository backend state and an async
    /// app context.
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    /// Optional key identifying the class of job; see `GitJobKey`.
    key: Option<GitJobKey>,
}
461
/// Keys identifying classes of git jobs.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
471 pub fn local(
472 worktree_store: &Entity<WorktreeStore>,
473 buffer_store: Entity<BufferStore>,
474 environment: Entity<ProjectEnvironment>,
475 fs: Arc<dyn Fs>,
476 cx: &mut Context<Self>,
477 ) -> Self {
478 Self::new(
479 worktree_store.clone(),
480 buffer_store,
481 GitStoreState::Local {
482 next_repository_id: Arc::new(AtomicU64::new(1)),
483 downstream: None,
484 project_environment: environment,
485 fs,
486 },
487 cx,
488 )
489 }
490
491 pub fn remote(
492 worktree_store: &Entity<WorktreeStore>,
493 buffer_store: Entity<BufferStore>,
494 upstream_client: AnyProtoClient,
495 project_id: u64,
496 cx: &mut Context<Self>,
497 ) -> Self {
498 Self::new(
499 worktree_store.clone(),
500 buffer_store,
501 GitStoreState::Remote {
502 upstream_client,
503 upstream_project_id: project_id,
504 downstream: None,
505 },
506 cx,
507 )
508 }
509
510 fn new(
511 worktree_store: Entity<WorktreeStore>,
512 buffer_store: Entity<BufferStore>,
513 state: GitStoreState,
514 cx: &mut Context<Self>,
515 ) -> Self {
516 let mut _subscriptions = vec![
517 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
518 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
519 ];
520
521 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
522 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
523 }
524
525 GitStore {
526 state,
527 buffer_store,
528 worktree_store,
529 repositories: HashMap::default(),
530 worktree_ids: HashMap::default(),
531 active_repo_id: None,
532 _subscriptions,
533 loading_diffs: HashMap::default(),
534 shared_diffs: HashMap::default(),
535 diffs: HashMap::default(),
536 }
537 }
538
    /// Registers all git-related RPC message handlers on the given client.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
586
    /// Whether this store operates directly on the local filesystem (as
    /// opposed to proxying to a remote host).
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
590 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
591 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
592 let id = repo.read(cx).id;
593 if self.active_repo_id != Some(id) {
594 self.active_repo_id = Some(id);
595 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
596 }
597 }
598 }
599
    /// Begins sharing this store's repository state with a downstream
    /// collaborator identified by `project_id`.
    ///
    /// For a remote store, current snapshots are forwarded immediately. For a
    /// local store, a background task streams incremental repository updates
    /// (and removals) to the client until the store is unshared.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send each repository's initial state, split into
                // size-limited messages.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the stream with the current state of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send a delta
                                            // against the last-sent snapshot.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send its full
                                            // initial state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The stream ended (or a send failed): tear down the
                        // downstream connection state.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
680
681 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
682 match &mut self.state {
683 GitStoreState::Local {
684 downstream: downstream_client,
685 ..
686 } => {
687 downstream_client.take();
688 }
689 GitStoreState::Remote {
690 downstream: downstream_client,
691 ..
692 } => {
693 downstream_client.take();
694 }
695 }
696 self.shared_diffs.clear();
697 }
698
    /// Drops all diff entities that were shared with the given peer.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
702
703 pub fn active_repository(&self) -> Option<Entity<Repository>> {
704 self.active_repo_id
705 .as_ref()
706 .map(|id| self.repositories[id].clone())
707 }
708
    /// Returns (opening if necessary) the diff of `buffer`'s content against
    /// the git index. Results are cached per buffer, and concurrent calls
    /// share a single loading task.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: the diff is already open. Wait for any in-flight
        // recalculation before handing it out.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Reuse an in-flight load if one exists; otherwise start one and
        // share it so concurrent callers can await the same task.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
763
    /// Returns (opening if necessary) a diff of `buffer` against the blob at
    /// commit `oid` (`None` loads no base content).
    ///
    /// Results are cached per `(buffer, oid)`; concurrent calls share one
    /// loading task. The returned diff is wired with the unstaged diff as its
    /// secondary diff.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff already exists. Wait for any in-flight
        // recalculation before returning it.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load is already in flight: piggyback on it.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the blob content for the requested commit, if any.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Attach the unstaged diff as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and the diff entity for reuse.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
864
    /// Returns (opening if necessary) the diff of `buffer`'s content against
    /// the committed (HEAD) text. Results are cached per buffer, and
    /// concurrent calls share a single loading task.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff is already open. Wait for any in-flight
        // recalculation before handing it out.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Reuse an in-flight load if one exists; otherwise start one and
        // share it so concurrent callers can await the same task.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
917
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`:
    /// records freshly loaded base texts in the buffer's git state, creates
    /// the `BufferDiff` entity, and waits for the initial recalculation.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Loading the base text failed: clear the loading entry so a
                // later call can retry, then propagate the error.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff carries the unstaged diff as its
                        // secondary diff, creating one here if necessary.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off the recalculation with the new base texts, then
                // wait for it to complete before resolving the diff.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
992
993 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
994 let diff_state = self.diffs.get(&buffer_id)?;
995 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
996 }
997
998 pub fn get_uncommitted_diff(
999 &self,
1000 buffer_id: BufferId,
1001 cx: &App,
1002 ) -> Option<Entity<BufferDiff>> {
1003 let diff_state = self.diffs.get(&buffer_id)?;
1004 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1005 }
1006
1007 pub fn get_diff_since_oid(
1008 &self,
1009 buffer_id: BufferId,
1010 oid: Option<git::Oid>,
1011 cx: &App,
1012 ) -> Option<Entity<BufferDiff>> {
1013 let diff_state = self.diffs.get(&buffer_id)?;
1014 diff_state.read(cx).oid_diff(oid)
1015 }
1016
    /// Returns (creating if necessary) the conflict set tracking merge
    /// conflict markers in `buffer`, and kicks off a reparse of the buffer's
    /// conflict markers.
    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: reuse the live conflict set, refreshing its markers.
        if let Some(git_state) = self.diffs.get(&buffer_id)
            && let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            let conflict_set = conflict_set;
            let buffer_snapshot = buffer.read(cx).text_snapshot();

            git_state.update(cx, |state, cx| {
                let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
            });

            return conflict_set;
        }

        // The conflict set starts out "unmerged" if the buffer's repository
        // reports a conflict for its path.
        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        // Re-emit conflict-set changes as store-level events.
        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }
1065
1066 pub fn project_path_git_status(
1067 &self,
1068 project_path: &ProjectPath,
1069 cx: &App,
1070 ) -> Option<FileStatus> {
1071 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1072 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1073 }
1074
1075 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
1076 let mut work_directory_abs_paths = Vec::new();
1077 let mut checkpoints = Vec::new();
1078 for repository in self.repositories.values() {
1079 repository.update(cx, |repository, _| {
1080 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
1081 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
1082 });
1083 }
1084
1085 cx.background_executor().spawn(async move {
1086 let checkpoints = future::try_join_all(checkpoints).await?;
1087 Ok(GitStoreCheckpoint {
1088 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
1089 .into_iter()
1090 .zip(checkpoints)
1091 .collect(),
1092 })
1093 })
1094 }
1095
1096 pub fn restore_checkpoint(
1097 &self,
1098 checkpoint: GitStoreCheckpoint,
1099 cx: &mut App,
1100 ) -> Task<Result<()>> {
1101 let repositories_by_work_dir_abs_path = self
1102 .repositories
1103 .values()
1104 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1105 .collect::<HashMap<_, _>>();
1106
1107 let mut tasks = Vec::new();
1108 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1109 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1110 let restore = repository.update(cx, |repository, _| {
1111 repository.restore_checkpoint(checkpoint)
1112 });
1113 tasks.push(async move { restore.await? });
1114 }
1115 }
1116 cx.background_spawn(async move {
1117 future::try_join_all(tasks).await?;
1118 Ok(())
1119 })
1120 }
1121
1122 /// Compares two checkpoints, returning true if they are equal.
1123 pub fn compare_checkpoints(
1124 &self,
1125 left: GitStoreCheckpoint,
1126 mut right: GitStoreCheckpoint,
1127 cx: &mut App,
1128 ) -> Task<Result<bool>> {
1129 let repositories_by_work_dir_abs_path = self
1130 .repositories
1131 .values()
1132 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1133 .collect::<HashMap<_, _>>();
1134
1135 let mut tasks = Vec::new();
1136 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1137 if let Some(right_checkpoint) = right
1138 .checkpoints_by_work_dir_abs_path
1139 .remove(&work_dir_abs_path)
1140 {
1141 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1142 {
1143 let compare = repository.update(cx, |repository, _| {
1144 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1145 });
1146
1147 tasks.push(async move { compare.await? });
1148 }
1149 } else {
1150 return Task::ready(Ok(false));
1151 }
1152 }
1153 cx.background_spawn(async move {
1154 Ok(future::try_join_all(tasks)
1155 .await?
1156 .into_iter()
1157 .all(|result| result))
1158 })
1159 }
1160
    /// Blames a buffer.
    ///
    /// When `version` is provided, the blame is computed against the buffer's
    /// text at that version; otherwise the current text is used. For a local
    /// repository the blame runs through the git backend; for a remote one a
    /// `BlameBuffer` RPC is forwarded upstream.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Capture the text (and the version it corresponds to) before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle across the await so the repository entity
        // can be dropped while the blame is in flight.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // The remote side blames against the serialized buffer version.
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1207
1208 pub fn file_history(
1209 &self,
1210 repo: &Entity<Repository>,
1211 path: RepoPath,
1212 cx: &mut App,
1213 ) -> Task<Result<git::repository::FileHistory>> {
1214 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1215
1216 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1217 }
1218
1219 pub fn file_history_paginated(
1220 &self,
1221 repo: &Entity<Repository>,
1222 path: RepoPath,
1223 skip: usize,
1224 limit: Option<usize>,
1225 cx: &mut App,
1226 ) -> Task<Result<git::repository::FileHistory>> {
1227 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1228
1229 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1230 }
1231
    /// Builds a permalink URL to the given line range of a buffer on its git
    /// hosting provider, using the current HEAD SHA for a local repository or
    /// delegating to the host via RPC for a remote one.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Use the branch's upstream remote when it has one; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        // Map the remote URL to a known hosting provider
                        // (e.g. GitHub) that knows how to format permalinks.
                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // The host has local repository access; ask it to
                        // build the permalink for us.
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1316
1317 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1318 match &self.state {
1319 GitStoreState::Local {
1320 downstream: downstream_client,
1321 ..
1322 } => downstream_client
1323 .as_ref()
1324 .map(|state| (state.client.clone(), state.project_id)),
1325 GitStoreState::Remote {
1326 downstream: downstream_client,
1327 ..
1328 } => downstream_client.clone(),
1329 }
1330 }
1331
1332 fn upstream_client(&self) -> Option<AnyProtoClient> {
1333 match &self.state {
1334 GitStoreState::Local { .. } => None,
1335 GitStoreState::Remote {
1336 upstream_client, ..
1337 } => Some(upstream_client.clone()),
1338 }
1339 }
1340
    /// Routes worktree-store events into git state: schedules repository scans,
    /// forwards changed paths, and prunes repositories whose worktrees are gone.
    ///
    /// Only relevant for `GitStoreState::Local`; remote stores learn about
    /// repositories via proto updates instead, so they return early here.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by owning repository, then
                    // notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible (e.g. background) worktrees don't contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Drop this worktree from every repository's worktree set, and
                // collect the repositories that no longer have any worktree.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream collaborators the repository is gone.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to a repository change by refreshing conflict state for every
    /// open buffer that belongs to that repository, then re-emits the event at
    /// the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // `conflict_set` is a weak handle; `?` bails out of the
                        // closure if it has been dropped.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only rescan the buffer for conflict markers when the
                        // merged/unmerged state actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1486
    /// Re-emits a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1490
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
    ///
    /// Each entry in `updated_git_repositories` either matches an existing
    /// repository (which is re-pathed or detached from this worktree) or
    /// describes a newly discovered repository to register.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update against an existing repository by either its
            // old or its new work-directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still exists: adopt the (possibly changed)
                    // work directory and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Repository no longer visible from this worktree; if no
                    // other worktree references it, mark it for removal below.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // Newly discovered repository: register it and kick off a scan.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Git hooks and similar only run for trusted worktrees.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // First repository found becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        // Drop repositories that lost their last worktree, notifying
        // downstream collaborators.
        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1603
1604 fn on_trusted_worktrees_event(
1605 &mut self,
1606 _: Entity<TrustedWorktreesStore>,
1607 event: &TrustedWorktreesEvent,
1608 cx: &mut Context<Self>,
1609 ) {
1610 if !matches!(self.state, GitStoreState::Local { .. }) {
1611 return;
1612 }
1613
1614 let (is_trusted, event_paths) = match event {
1615 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1616 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1617 };
1618
1619 for (repo_id, worktree_ids) in &self.worktree_ids {
1620 if worktree_ids
1621 .iter()
1622 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1623 {
1624 if let Some(repo) = self.repositories.get(repo_id) {
1625 let repository_state = repo.read(cx).repository_state.clone();
1626 cx.background_spawn(async move {
1627 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1628 state.backend.set_trusted(is_trusted);
1629 }
1630 })
1631 .detach();
1632 }
1633 }
1634 }
1635 }
1636
    /// Keeps per-buffer git state in sync with buffer lifecycle events:
    /// language changes, closed/dropped buffers, and file-path changes.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // A language change affects diff syntax highlighting, so
                // forward it to the buffer's diff state if it has one.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // Drop the diff we were holding open on behalf of that peer.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // The buffer is gone entirely; release all associated diff state.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Reload the committed base text for the new path and feed
                    // it into the diff state; errors are logged, not surfaced.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1709
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all of them have
    /// finished.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            // Buffers without diff state have nothing to recalculate.
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` is `None` when no recalculation
                    // is pending; only wait when there is one.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1735
    /// Writes hunk staging/unstaging changes through to the git index.
    ///
    /// On failure, the diff's pending-hunk state is rolled back and an
    /// `IndexWriteError` event is emitted for the UI to surface.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Monotonic counter used to discard stale index writes when
                // multiple staging operations race.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            // `None` index text means the entry is removed from the index.
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Outer `Ok` is the job completing; inner `Err` is the
                        // index write failing.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1775
1776 fn local_worktree_git_repos_changed(
1777 &mut self,
1778 worktree: Entity<Worktree>,
1779 changed_repos: &UpdatedGitRepositoriesSet,
1780 cx: &mut Context<Self>,
1781 ) {
1782 log::debug!("local worktree repos changed");
1783 debug_assert!(worktree.read(cx).is_local());
1784
1785 for repository in self.repositories.values() {
1786 repository.update(cx, |repository, cx| {
1787 let repo_abs_path = &repository.work_directory_abs_path;
1788 if changed_repos.iter().any(|update| {
1789 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1790 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1791 }) {
1792 repository.reload_buffer_diff_bases(cx);
1793 }
1794 });
1795 }
1796 }
1797
    /// All repositories known to this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1801
1802 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1803 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1804 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1805 Some(status.status)
1806 }
1807
1808 pub fn repository_and_path_for_buffer_id(
1809 &self,
1810 buffer_id: BufferId,
1811 cx: &App,
1812 ) -> Option<(Entity<Repository>, RepoPath)> {
1813 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1814 let project_path = buffer.read(cx).project_path(cx)?;
1815 self.repository_and_path_for_project_path(&project_path, cx)
1816 }
1817
1818 pub fn repository_and_path_for_project_path(
1819 &self,
1820 path: &ProjectPath,
1821 cx: &App,
1822 ) -> Option<(Entity<Repository>, RepoPath)> {
1823 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1824 self.repositories
1825 .values()
1826 .filter_map(|repo| {
1827 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1828 Some((repo.clone(), repo_path))
1829 })
1830 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1831 }
1832
1833 pub fn git_init(
1834 &self,
1835 path: Arc<Path>,
1836 fallback_branch_name: String,
1837 cx: &App,
1838 ) -> Task<Result<()>> {
1839 match &self.state {
1840 GitStoreState::Local { fs, .. } => {
1841 let fs = fs.clone();
1842 cx.background_executor()
1843 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1844 }
1845 GitStoreState::Remote {
1846 upstream_client,
1847 upstream_project_id: project_id,
1848 ..
1849 } => {
1850 let client = upstream_client.clone();
1851 let project_id = *project_id;
1852 cx.background_executor().spawn(async move {
1853 client
1854 .request(proto::GitInit {
1855 project_id: project_id,
1856 abs_path: path.to_string_lossy().into_owned(),
1857 fallback_branch_name,
1858 })
1859 .await?;
1860 Ok(())
1861 })
1862 }
1863 }
1864 }
1865
1866 pub fn git_clone(
1867 &self,
1868 repo: String,
1869 path: impl Into<Arc<std::path::Path>>,
1870 cx: &App,
1871 ) -> Task<Result<()>> {
1872 let path = path.into();
1873 match &self.state {
1874 GitStoreState::Local { fs, .. } => {
1875 let fs = fs.clone();
1876 cx.background_executor()
1877 .spawn(async move { fs.git_clone(&repo, &path).await })
1878 }
1879 GitStoreState::Remote {
1880 upstream_client,
1881 upstream_project_id,
1882 ..
1883 } => {
1884 if upstream_client.is_via_collab() {
1885 return Task::ready(Err(anyhow!(
1886 "Git Clone isn't supported for project guests"
1887 )));
1888 }
1889 let request = upstream_client.request(proto::GitClone {
1890 project_id: *upstream_project_id,
1891 abs_path: path.to_string_lossy().into_owned(),
1892 remote_repo: repo,
1893 });
1894
1895 cx.background_spawn(async move {
1896 let result = request.await?;
1897
1898 match result.success {
1899 true => Ok(()),
1900 false => Err(anyhow!("Git Clone failed")),
1901 }
1902 })
1903 }
1904 }
1905 }
1906
    /// Handles an `UpdateRepository` message from upstream: creates the remote
    /// repository entity on first sight, applies the update, and re-forwards
    /// it to any downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Create the repository entity lazily on the first update for this id.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository we hear about becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Relay the update downstream with our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1962
1963 async fn handle_remove_repository(
1964 this: Entity<Self>,
1965 envelope: TypedEnvelope<proto::RemoveRepository>,
1966 mut cx: AsyncApp,
1967 ) -> Result<()> {
1968 this.update(&mut cx, |this, cx| {
1969 let mut update = envelope.payload;
1970 let id = RepositoryId::from_proto(update.id);
1971 this.repositories.remove(&id);
1972 if let Some((client, project_id)) = this.downstream_client() {
1973 update.project_id = project_id.to_proto();
1974 client.send(update).log_err();
1975 }
1976 if this.active_repo_id == Some(id) {
1977 this.active_repo_id = None;
1978 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1979 }
1980 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1981 });
1982 Ok(())
1983 }
1984
1985 async fn handle_git_init(
1986 this: Entity<Self>,
1987 envelope: TypedEnvelope<proto::GitInit>,
1988 cx: AsyncApp,
1989 ) -> Result<proto::Ack> {
1990 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1991 let name = envelope.payload.fallback_branch_name;
1992 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1993 .await?;
1994
1995 Ok(proto::Ack {})
1996 }
1997
1998 async fn handle_git_clone(
1999 this: Entity<Self>,
2000 envelope: TypedEnvelope<proto::GitClone>,
2001 cx: AsyncApp,
2002 ) -> Result<proto::GitCloneResponse> {
2003 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2004 let repo_name = envelope.payload.remote_repo;
2005 let result = cx
2006 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2007 .await;
2008
2009 Ok(proto::GitCloneResponse {
2010 success: result.is_ok(),
2011 })
2012 }
2013
2014 async fn handle_fetch(
2015 this: Entity<Self>,
2016 envelope: TypedEnvelope<proto::Fetch>,
2017 mut cx: AsyncApp,
2018 ) -> Result<proto::RemoteMessageResponse> {
2019 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2020 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2021 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2022 let askpass_id = envelope.payload.askpass_id;
2023
2024 let askpass = make_remote_delegate(
2025 this,
2026 envelope.payload.project_id,
2027 repository_id,
2028 askpass_id,
2029 &mut cx,
2030 );
2031
2032 let remote_output = repository_handle
2033 .update(&mut cx, |repository_handle, cx| {
2034 repository_handle.fetch(fetch_options, askpass, cx)
2035 })
2036 .await??;
2037
2038 Ok(proto::RemoteMessageResponse {
2039 stdout: remote_output.stdout,
2040 stderr: remote_output.stderr,
2041 })
2042 }
2043
    /// Handles a remote request to push a branch to a remote.
    ///
    /// Git authentication prompts are forwarded back to the requesting peer
    /// through the askpass delegate registered under `askpass_id`.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `options` is optional on the wire; translate the proto enum to the
        // repository-level push options only when one was supplied.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        // Double `?`: propagate failures from both dispatching the push and
        // from the git command itself.
        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2091
    /// Handles a remote request to pull from a remote, optionally rebasing.
    ///
    /// Authentication prompts are forwarded to the requesting peer via the
    /// askpass delegate registered under `askpass_id`.
    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `branch_name` is optional on the wire.
        let branch_name = envelope.payload.branch_name.map(|name| name.into());
        let remote_name = envelope.payload.remote_name.into();
        let rebase = envelope.payload.rebase;

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }
2123
2124 async fn handle_stage(
2125 this: Entity<Self>,
2126 envelope: TypedEnvelope<proto::Stage>,
2127 mut cx: AsyncApp,
2128 ) -> Result<proto::Ack> {
2129 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2130 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2131
2132 let entries = envelope
2133 .payload
2134 .paths
2135 .into_iter()
2136 .map(|path| RepoPath::new(&path))
2137 .collect::<Result<Vec<_>>>()?;
2138
2139 repository_handle
2140 .update(&mut cx, |repository_handle, cx| {
2141 repository_handle.stage_entries(entries, cx)
2142 })
2143 .await?;
2144 Ok(proto::Ack {})
2145 }
2146
2147 async fn handle_unstage(
2148 this: Entity<Self>,
2149 envelope: TypedEnvelope<proto::Unstage>,
2150 mut cx: AsyncApp,
2151 ) -> Result<proto::Ack> {
2152 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2153 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2154
2155 let entries = envelope
2156 .payload
2157 .paths
2158 .into_iter()
2159 .map(|path| RepoPath::new(&path))
2160 .collect::<Result<Vec<_>>>()?;
2161
2162 repository_handle
2163 .update(&mut cx, |repository_handle, cx| {
2164 repository_handle.unstage_entries(entries, cx)
2165 })
2166 .await?;
2167
2168 Ok(proto::Ack {})
2169 }
2170
2171 async fn handle_stash(
2172 this: Entity<Self>,
2173 envelope: TypedEnvelope<proto::Stash>,
2174 mut cx: AsyncApp,
2175 ) -> Result<proto::Ack> {
2176 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2177 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2178
2179 let entries = envelope
2180 .payload
2181 .paths
2182 .into_iter()
2183 .map(|path| RepoPath::new(&path))
2184 .collect::<Result<Vec<_>>>()?;
2185
2186 repository_handle
2187 .update(&mut cx, |repository_handle, cx| {
2188 repository_handle.stash_entries(entries, cx)
2189 })
2190 .await?;
2191
2192 Ok(proto::Ack {})
2193 }
2194
2195 async fn handle_stash_pop(
2196 this: Entity<Self>,
2197 envelope: TypedEnvelope<proto::StashPop>,
2198 mut cx: AsyncApp,
2199 ) -> Result<proto::Ack> {
2200 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2201 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2202 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2203
2204 repository_handle
2205 .update(&mut cx, |repository_handle, cx| {
2206 repository_handle.stash_pop(stash_index, cx)
2207 })
2208 .await?;
2209
2210 Ok(proto::Ack {})
2211 }
2212
2213 async fn handle_stash_apply(
2214 this: Entity<Self>,
2215 envelope: TypedEnvelope<proto::StashApply>,
2216 mut cx: AsyncApp,
2217 ) -> Result<proto::Ack> {
2218 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2219 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2220 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2221
2222 repository_handle
2223 .update(&mut cx, |repository_handle, cx| {
2224 repository_handle.stash_apply(stash_index, cx)
2225 })
2226 .await?;
2227
2228 Ok(proto::Ack {})
2229 }
2230
    /// Handles a remote request to drop a stash entry, optionally targeting a
    /// specific stash index.
    async fn handle_stash_drop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashDrop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        // Double `?`: unlike pop/apply above, `stash_drop` resolves to a
        // nested `Result` that must also be propagated.
        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_drop(stash_index, cx)
            })
            .await??;

        Ok(proto::Ack {})
    }
2248
    /// Handles a remote request to overwrite the staged (index) content of a
    /// single path with the text supplied in the payload.
    async fn handle_set_index_text(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SetIndexText>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let repo_path = RepoPath::from_proto(&envelope.payload.path)?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.spawn_set_index_text_job(
                    repo_path,
                    envelope.payload.text,
                    None,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2270
    /// Handles a remote request to run a git hook in the repository; the
    /// request fails early if the hook identifier cannot be decoded.
    async fn handle_run_hook(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RunGitHook>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.run_hook(hook, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2286
    /// Handles a remote request to create a commit.
    ///
    /// Authentication prompts (e.g. for commit signing helpers) are forwarded
    /// via the askpass delegate registered under `askpass_id`.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    // `zip`: an author override is applied only when BOTH name
                    // and email were supplied.
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2325
2326 async fn handle_get_remotes(
2327 this: Entity<Self>,
2328 envelope: TypedEnvelope<proto::GetRemotes>,
2329 mut cx: AsyncApp,
2330 ) -> Result<proto::GetRemotesResponse> {
2331 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2332 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2333
2334 let branch_name = envelope.payload.branch_name;
2335 let is_push = envelope.payload.is_push;
2336
2337 let remotes = repository_handle
2338 .update(&mut cx, |repository_handle, _| {
2339 repository_handle.get_remotes(branch_name, is_push)
2340 })
2341 .await??;
2342
2343 Ok(proto::GetRemotesResponse {
2344 remotes: remotes
2345 .into_iter()
2346 .map(|remotes| proto::get_remotes_response::Remote {
2347 name: remotes.name.to_string(),
2348 })
2349 .collect::<Vec<_>>(),
2350 })
2351 }
2352
2353 async fn handle_get_worktrees(
2354 this: Entity<Self>,
2355 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2356 mut cx: AsyncApp,
2357 ) -> Result<proto::GitWorktreesResponse> {
2358 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2359 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2360
2361 let worktrees = repository_handle
2362 .update(&mut cx, |repository_handle, _| {
2363 repository_handle.worktrees()
2364 })
2365 .await??;
2366
2367 Ok(proto::GitWorktreesResponse {
2368 worktrees: worktrees
2369 .into_iter()
2370 .map(|worktree| worktree_to_proto(&worktree))
2371 .collect::<Vec<_>>(),
2372 })
2373 }
2374
2375 async fn handle_create_worktree(
2376 this: Entity<Self>,
2377 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2378 mut cx: AsyncApp,
2379 ) -> Result<proto::Ack> {
2380 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2381 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2382 let directory = PathBuf::from(envelope.payload.directory);
2383 let name = envelope.payload.name;
2384 let commit = envelope.payload.commit;
2385
2386 repository_handle
2387 .update(&mut cx, |repository_handle, _| {
2388 repository_handle.create_worktree(name, directory, commit)
2389 })
2390 .await??;
2391
2392 Ok(proto::Ack {})
2393 }
2394
2395 async fn handle_remove_worktree(
2396 this: Entity<Self>,
2397 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2398 mut cx: AsyncApp,
2399 ) -> Result<proto::Ack> {
2400 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2401 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2402 let path = PathBuf::from(envelope.payload.path);
2403 let force = envelope.payload.force;
2404
2405 repository_handle
2406 .update(&mut cx, |repository_handle, _| {
2407 repository_handle.remove_worktree(path, force)
2408 })
2409 .await??;
2410
2411 Ok(proto::Ack {})
2412 }
2413
2414 async fn handle_rename_worktree(
2415 this: Entity<Self>,
2416 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2417 mut cx: AsyncApp,
2418 ) -> Result<proto::Ack> {
2419 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2420 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2421 let old_path = PathBuf::from(envelope.payload.old_path);
2422 let new_path = PathBuf::from(envelope.payload.new_path);
2423
2424 repository_handle
2425 .update(&mut cx, |repository_handle, _| {
2426 repository_handle.rename_worktree(old_path, new_path)
2427 })
2428 .await??;
2429
2430 Ok(proto::Ack {})
2431 }
2432
2433 async fn handle_get_branches(
2434 this: Entity<Self>,
2435 envelope: TypedEnvelope<proto::GitGetBranches>,
2436 mut cx: AsyncApp,
2437 ) -> Result<proto::GitBranchesResponse> {
2438 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2439 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2440
2441 let branches = repository_handle
2442 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2443 .await??;
2444
2445 Ok(proto::GitBranchesResponse {
2446 branches: branches
2447 .into_iter()
2448 .map(|branch| branch_to_proto(&branch))
2449 .collect::<Vec<_>>(),
2450 })
2451 }
2452 async fn handle_get_default_branch(
2453 this: Entity<Self>,
2454 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2455 mut cx: AsyncApp,
2456 ) -> Result<proto::GetDefaultBranchResponse> {
2457 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2458 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2459
2460 let branch = repository_handle
2461 .update(&mut cx, |repository_handle, _| {
2462 repository_handle.default_branch(false)
2463 })
2464 .await??
2465 .map(Into::into);
2466
2467 Ok(proto::GetDefaultBranchResponse { branch })
2468 }
2469 async fn handle_create_branch(
2470 this: Entity<Self>,
2471 envelope: TypedEnvelope<proto::GitCreateBranch>,
2472 mut cx: AsyncApp,
2473 ) -> Result<proto::Ack> {
2474 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2475 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2476 let branch_name = envelope.payload.branch_name;
2477
2478 repository_handle
2479 .update(&mut cx, |repository_handle, _| {
2480 repository_handle.create_branch(branch_name, None)
2481 })
2482 .await??;
2483
2484 Ok(proto::Ack {})
2485 }
2486
2487 async fn handle_change_branch(
2488 this: Entity<Self>,
2489 envelope: TypedEnvelope<proto::GitChangeBranch>,
2490 mut cx: AsyncApp,
2491 ) -> Result<proto::Ack> {
2492 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2493 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2494 let branch_name = envelope.payload.branch_name;
2495
2496 repository_handle
2497 .update(&mut cx, |repository_handle, _| {
2498 repository_handle.change_branch(branch_name)
2499 })
2500 .await??;
2501
2502 Ok(proto::Ack {})
2503 }
2504
2505 async fn handle_rename_branch(
2506 this: Entity<Self>,
2507 envelope: TypedEnvelope<proto::GitRenameBranch>,
2508 mut cx: AsyncApp,
2509 ) -> Result<proto::Ack> {
2510 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2511 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2512 let branch = envelope.payload.branch;
2513 let new_name = envelope.payload.new_name;
2514
2515 repository_handle
2516 .update(&mut cx, |repository_handle, _| {
2517 repository_handle.rename_branch(branch, new_name)
2518 })
2519 .await??;
2520
2521 Ok(proto::Ack {})
2522 }
2523
2524 async fn handle_create_remote(
2525 this: Entity<Self>,
2526 envelope: TypedEnvelope<proto::GitCreateRemote>,
2527 mut cx: AsyncApp,
2528 ) -> Result<proto::Ack> {
2529 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2530 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2531 let remote_name = envelope.payload.remote_name;
2532 let remote_url = envelope.payload.remote_url;
2533
2534 repository_handle
2535 .update(&mut cx, |repository_handle, _| {
2536 repository_handle.create_remote(remote_name, remote_url)
2537 })
2538 .await??;
2539
2540 Ok(proto::Ack {})
2541 }
2542
2543 async fn handle_delete_branch(
2544 this: Entity<Self>,
2545 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2546 mut cx: AsyncApp,
2547 ) -> Result<proto::Ack> {
2548 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2549 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2550 let is_remote = envelope.payload.is_remote;
2551 let branch_name = envelope.payload.branch_name;
2552
2553 repository_handle
2554 .update(&mut cx, |repository_handle, _| {
2555 repository_handle.delete_branch(is_remote, branch_name)
2556 })
2557 .await??;
2558
2559 Ok(proto::Ack {})
2560 }
2561
2562 async fn handle_remove_remote(
2563 this: Entity<Self>,
2564 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2565 mut cx: AsyncApp,
2566 ) -> Result<proto::Ack> {
2567 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2568 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2569 let remote_name = envelope.payload.remote_name;
2570
2571 repository_handle
2572 .update(&mut cx, |repository_handle, _| {
2573 repository_handle.remove_remote(remote_name)
2574 })
2575 .await??;
2576
2577 Ok(proto::Ack {})
2578 }
2579
    /// Handles a remote request for the details of a single commit
    /// (sha, message, timestamp, and author identity).
    async fn handle_show(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitShow>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitCommitDetails> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.show(envelope.payload.commit)
            })
            .await??;
        Ok(proto::GitCommitDetails {
            sha: commit.sha.into(),
            message: commit.message.into(),
            commit_timestamp: commit.commit_timestamp,
            author_email: commit.author_email.into(),
            author_name: commit.author_name.into(),
        })
    }
2601
    /// Handles a remote request to load the full diff of a commit, returning
    /// each changed file with its old and new text (or a binary marker).
    async fn handle_load_commit_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::LoadCommitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::LoadCommitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit_diff = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.load_commit_diff(envelope.payload.commit)
            })
            .await??;
        Ok(proto::LoadCommitDiffResponse {
            files: commit_diff
                .files
                .into_iter()
                .map(|file| proto::CommitFile {
                    path: file.path.to_proto(),
                    old_text: file.old_text,
                    new_text: file.new_text,
                    is_binary: file.is_binary,
                })
                .collect(),
        })
    }
2628
    /// Handles a remote request for the commit history of a single file,
    /// paginated via `skip` and an optional `limit`.
    async fn handle_file_history(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitFileHistory>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitFileHistoryResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let path = RepoPath::from_proto(&envelope.payload.path)?;
        let skip = envelope.payload.skip as usize;
        let limit = envelope.payload.limit.map(|l| l as usize);

        let file_history = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.file_history_paginated(path, skip, limit)
            })
            .await??;

        Ok(proto::GitFileHistoryResponse {
            entries: file_history
                .entries
                .into_iter()
                .map(|entry| proto::FileHistoryEntry {
                    sha: entry.sha.to_string(),
                    subject: entry.subject.to_string(),
                    message: entry.message.to_string(),
                    commit_timestamp: entry.commit_timestamp,
                    author_name: entry.author_name.to_string(),
                    author_email: entry.author_email.to_string(),
                })
                .collect(),
            // The path is echoed back; it may differ from the request's path
            // as reported by the history lookup.
            path: file_history.path.to_proto(),
        })
    }
2662
    /// Handles a remote request to reset the repository to a commit.
    /// Only soft and mixed resets are representable on the wire.
    async fn handle_reset(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitReset>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let mode = match envelope.payload.mode() {
            git_reset::ResetMode::Soft => ResetMode::Soft,
            git_reset::ResetMode::Mixed => ResetMode::Mixed,
        };

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.reset(envelope.payload.commit, mode, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2683
    /// Handles a remote request to restore a set of paths to their state at a
    /// given commit.
    async fn handle_checkout_files(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCheckoutFiles>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        // Decode all wire paths first; any invalid path fails the request.
        let paths = envelope
            .payload
            .paths
            .iter()
            .map(|s| RepoPath::from_proto(s))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
            })
            .await?;
        Ok(proto::Ack {})
    }
2705
    /// Handles a remote request to open the commit-message buffer for a
    /// repository, replicating it to the requesting peer and returning its id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Replicate the buffer to the peer that asked for it; prefer the
        // original sender when the request was forwarded.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2736
    /// Handles an askpass prompt relayed from a git operation: looks up the
    /// delegate registered under `askpass_id`, asks it for the password, and
    /// returns the (decrypted) answer to the caller.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Temporarily remove the delegate from the map while prompting so we
        // hold no lock across the await; it is reinserted below.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2765
2766 async fn handle_check_for_pushed_commits(
2767 this: Entity<Self>,
2768 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2769 mut cx: AsyncApp,
2770 ) -> Result<proto::CheckForPushedCommitsResponse> {
2771 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2772 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2773
2774 let branches = repository_handle
2775 .update(&mut cx, |repository_handle, _| {
2776 repository_handle.check_for_pushed_commits()
2777 })
2778 .await??;
2779 Ok(proto::CheckForPushedCommitsResponse {
2780 pushed_to: branches
2781 .into_iter()
2782 .map(|commit| commit.to_string())
2783 .collect(),
2784 })
2785 }
2786
    /// Handles a remote request for a textual diff of the repository,
    /// truncating the response to roughly 1 MB to bound message size.
    async fn handle_git_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let diff_type = match envelope.payload.diff_type() {
            proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
            proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
            proto::git_diff::DiffType::MergeBase => {
                // Merge-base diffs require the base ref to be present on the
                // wire; reject the request otherwise.
                let base_ref = envelope
                    .payload
                    .merge_base_ref
                    .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
                DiffType::MergeBase {
                    base_ref: base_ref.into(),
                }
            }
        };

        let mut diff = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.diff(diff_type, cx)
            })
            .await??;
        const ONE_MB: usize = 1_000_000;
        // Truncate by characters (not bytes) so the cut never splits a UTF-8
        // code point.
        if diff.len() > ONE_MB {
            diff = diff.chars().take(ONE_MB).collect()
        }

        Ok(proto::GitDiffResponse { diff })
    }
2820
2821 async fn handle_tree_diff(
2822 this: Entity<Self>,
2823 request: TypedEnvelope<proto::GetTreeDiff>,
2824 mut cx: AsyncApp,
2825 ) -> Result<proto::GetTreeDiffResponse> {
2826 let repository_id = RepositoryId(request.payload.repository_id);
2827 let diff_type = if request.payload.is_merge {
2828 DiffTreeType::MergeBase {
2829 base: request.payload.base.into(),
2830 head: request.payload.head.into(),
2831 }
2832 } else {
2833 DiffTreeType::Since {
2834 base: request.payload.base.into(),
2835 head: request.payload.head.into(),
2836 }
2837 };
2838
2839 let diff = this
2840 .update(&mut cx, |this, cx| {
2841 let repository = this.repositories().get(&repository_id)?;
2842 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2843 })
2844 .context("missing repository")?
2845 .await??;
2846
2847 Ok(proto::GetTreeDiffResponse {
2848 entries: diff
2849 .entries
2850 .into_iter()
2851 .map(|(path, status)| proto::TreeDiffStatus {
2852 path: path.as_ref().to_proto(),
2853 status: match status {
2854 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2855 TreeDiffStatus::Modified { .. } => {
2856 proto::tree_diff_status::Status::Modified.into()
2857 }
2858 TreeDiffStatus::Deleted { .. } => {
2859 proto::tree_diff_status::Status::Deleted.into()
2860 }
2861 },
2862 oid: match status {
2863 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2864 Some(old.to_string())
2865 }
2866 TreeDiffStatus::Added => None,
2867 },
2868 })
2869 .collect(),
2870 })
2871 }
2872
    /// Handles a remote request to load the content of a git blob by oid.
    async fn handle_get_blob_content(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetBlobContent>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetBlobContentResponse> {
        // Parse the oid first so a malformed request fails before any lookup.
        let oid = git::Oid::from_str(&request.payload.oid)?;
        let repository_id = RepositoryId(request.payload.repository_id);
        let content = this
            .update(&mut cx, |this, cx| {
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
            })
            .context("missing repository")?
            .await?;
        Ok(proto::GetBlobContentResponse { content })
    }
2889
    /// Handles a remote request to open the unstaged diff for a buffer,
    /// caching the diff per requesting peer and returning the staged base
    /// text (if any).
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Remember the diff for this peer so later updates can be routed to it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2913
    /// Handles a remote request to open the uncommitted diff for a buffer.
    ///
    /// Responds with the committed (HEAD) and staged (index) base texts, plus
    /// a `Mode` telling the peer whether the index matches HEAD (in which
    /// case the staged text is omitted to avoid sending it twice).
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Remember the diff for this peer so later updates can be routed to it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff (if present) tracks the index; grab its base
            // text only when one actually exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    // Identical snapshot ids mean the index matches HEAD, so
                    // the staged text would be redundant.
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2974
    /// Handles a notification that a buffer's diff base texts changed,
    /// forwarding the new bases to the buffer's diff state if both the diff
    /// state and the buffer still exist (otherwise the update is dropped).
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
2993
    /// Handles a remote request to blame a buffer at a specific version,
    /// waiting until the local replica has caught up to that version first.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Ensure the buffer has applied all operations up to the requested
        // version before blaming, so line numbers line up for the requester.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3016
3017 async fn handle_get_permalink_to_line(
3018 this: Entity<Self>,
3019 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3020 mut cx: AsyncApp,
3021 ) -> Result<proto::GetPermalinkToLineResponse> {
3022 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3023 // let version = deserialize_version(&envelope.payload.version);
3024 let selection = {
3025 let proto_selection = envelope
3026 .payload
3027 .selection
3028 .context("no selection to get permalink for defined")?;
3029 proto_selection.start as u32..proto_selection.end as u32
3030 };
3031 let buffer = this.read_with(&cx, |this, cx| {
3032 this.buffer_store.read(cx).get_existing(buffer_id)
3033 })?;
3034 let permalink = this
3035 .update(&mut cx, |this, cx| {
3036 this.get_permalink_to_line(&buffer, selection, cx)
3037 })
3038 .await?;
3039 Ok(proto::GetPermalinkToLineResponse {
3040 permalink: permalink.to_string(),
3041 })
3042 }
3043
3044 fn repository_for_request(
3045 this: &Entity<Self>,
3046 id: RepositoryId,
3047 cx: &mut AsyncApp,
3048 ) -> Result<Entity<Repository>> {
3049 this.read_with(cx, |this, _| {
3050 this.repositories
3051 .get(&id)
3052 .context("missing repository handle")
3053 .cloned()
3054 })
3055 }
3056
3057 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3058 self.repositories
3059 .iter()
3060 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3061 .collect()
3062 }
3063
    /// Groups a batch of updated worktree entries by the repository that owns
    /// each path, producing a map from repository to the repo-relative paths
    /// that changed. When repositories are nested, each path is attributed to
    /// its innermost repository. The grouping runs on the background executor.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorting lets each repository claim its paths as one contiguous range below.
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sort repositories by work-directory path; iterated in reverse below,
            // this visits deeper (more specific) work directories first.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    // First claimant wins; tasks were built deepest-repo-first.
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3143}
3144
3145impl BufferGitState {
3146 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3147 Self {
3148 unstaged_diff: Default::default(),
3149 uncommitted_diff: Default::default(),
3150 oid_diffs: Default::default(),
3151 recalculate_diff_task: Default::default(),
3152 language: Default::default(),
3153 language_registry: Default::default(),
3154 recalculating_tx: postage::watch::channel_with(false).0,
3155 hunk_staging_operation_count: 0,
3156 hunk_staging_operation_count_as_of_write: 0,
3157 head_text: Default::default(),
3158 index_text: Default::default(),
3159 oid_texts: Default::default(),
3160 head_changed: Default::default(),
3161 index_changed: Default::default(),
3162 language_changed: Default::default(),
3163 conflict_updated_futures: Default::default(),
3164 conflict_set: Default::default(),
3165 reparse_conflict_markers_task: Default::default(),
3166 }
3167 }
3168
3169 #[ztracing::instrument(skip_all)]
3170 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3171 self.language = buffer.read(cx).language().cloned();
3172 self.language_changed = true;
3173 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3174 }
3175
    /// Re-parses git conflict markers in `buffer` and updates the associated
    /// `ConflictSet`. The returned receiver resolves once the update lands; if
    /// there is no live conflict set, or the set had no conflicts, the sender
    /// is dropped immediately and the receiver yields `Canceled` instead.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only re-parse when the buffer previously had conflicts to reconcile.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff against the old snapshot off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake every caller waiting on a reparse, not just this one.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3227
3228 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3229 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3230 }
3231
3232 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3233 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3234 }
3235
3236 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3237 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3238 }
3239
3240 fn handle_base_texts_updated(
3241 &mut self,
3242 buffer: text::BufferSnapshot,
3243 message: proto::UpdateDiffBases,
3244 cx: &mut Context<Self>,
3245 ) {
3246 use proto::update_diff_bases::Mode;
3247
3248 let Some(mode) = Mode::from_i32(message.mode) else {
3249 return;
3250 };
3251
3252 let diff_bases_change = match mode {
3253 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3254 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3255 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3256 Mode::IndexAndHead => DiffBasesChange::SetEach {
3257 index: message.staged_text,
3258 head: message.committed_text,
3259 },
3260 };
3261
3262 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3263 }
3264
3265 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3266 if *self.recalculating_tx.borrow() {
3267 let mut rx = self.recalculating_tx.subscribe();
3268 Some(async move {
3269 loop {
3270 let is_recalculating = rx.recv().await;
3271 if is_recalculating != Some(true) {
3272 break;
3273 }
3274 }
3275 })
3276 } else {
3277 None
3278 }
3279 }
3280
3281 fn diff_bases_changed(
3282 &mut self,
3283 buffer: text::BufferSnapshot,
3284 diff_bases_change: Option<DiffBasesChange>,
3285 cx: &mut Context<Self>,
3286 ) {
3287 match diff_bases_change {
3288 Some(DiffBasesChange::SetIndex(index)) => {
3289 self.index_text = index.map(|mut index| {
3290 text::LineEnding::normalize(&mut index);
3291 Arc::from(index.as_str())
3292 });
3293 self.index_changed = true;
3294 }
3295 Some(DiffBasesChange::SetHead(head)) => {
3296 self.head_text = head.map(|mut head| {
3297 text::LineEnding::normalize(&mut head);
3298 Arc::from(head.as_str())
3299 });
3300 self.head_changed = true;
3301 }
3302 Some(DiffBasesChange::SetBoth(text)) => {
3303 let text = text.map(|mut text| {
3304 text::LineEnding::normalize(&mut text);
3305 Arc::from(text.as_str())
3306 });
3307 self.head_text = text.clone();
3308 self.index_text = text;
3309 self.head_changed = true;
3310 self.index_changed = true;
3311 }
3312 Some(DiffBasesChange::SetEach { index, head }) => {
3313 self.index_text = index.map(|mut index| {
3314 text::LineEnding::normalize(&mut index);
3315 Arc::from(index.as_str())
3316 });
3317 self.index_changed = true;
3318 self.head_text = head.map(|mut head| {
3319 text::LineEnding::normalize(&mut head);
3320 Arc::from(head.as_str())
3321 });
3322 self.head_changed = true;
3323 }
3324 None => {}
3325 }
3326
3327 self.recalculate_diffs(buffer, cx)
3328 }
3329
    /// Recomputes the unstaged, uncommitted, and per-commit (oid) diffs for
    /// this buffer on a spawned task, publishing the new snapshots to the
    /// corresponding `BufferDiff` entities. `recalculating_tx` broadcasts
    /// `true` while the work is in flight. The task bails out early if new
    /// hunk staging operations arrive mid-flight; a later recalculation will
    /// pick up the settled state.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // `SetBoth` stores one shared Arc in both fields, so pointer identity
        // cheaply detects the index-matches-HEAD case.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop cached base texts for oid diffs that no longer have any live observers.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        // NOTE(review): the third argument appears to flag whether the
                        // base text changed and which side it belongs to — confirm
                        // against `BufferDiff::update_diff`.
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When the index matches HEAD, the unstaged diff doubles as the
                // uncommitted diff and we skip the second computation entirely.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        // Pass the unstaged diff's changed range through so the
                        // secondary (staged) highlights update in lockstep.
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Refresh any diffs pinned to specific commits (or to a `None` key).
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and announce that recalculation finished.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
3523}
3524
/// Builds an askpass delegate that forwards credential prompts from this
/// (host-side) repository to the downstream client over RPC, relaying the
/// encrypted response back to the waiting askpass channel.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // Without a downstream client nobody can answer; drop the prompt.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext response from memory once forwarded.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3554
3555impl RepositoryId {
3556 pub fn to_proto(self) -> u64 {
3557 self.0
3558 }
3559
3560 pub fn from_proto(id: u64) -> Self {
3561 RepositoryId(id)
3562 }
3563}
3564
3565impl RepositorySnapshot {
3566 fn empty(
3567 id: RepositoryId,
3568 work_directory_abs_path: Arc<Path>,
3569 original_repo_abs_path: Option<Arc<Path>>,
3570 path_style: PathStyle,
3571 ) -> Self {
3572 Self {
3573 id,
3574 statuses_by_path: Default::default(),
3575 original_repo_abs_path: original_repo_abs_path
3576 .unwrap_or_else(|| work_directory_abs_path.clone()),
3577 work_directory_abs_path,
3578 branch: None,
3579 head_commit: None,
3580 scan_id: 0,
3581 merge: Default::default(),
3582 remote_origin_url: None,
3583 remote_upstream_url: None,
3584 stash_entries: Default::default(),
3585 linked_worktrees: Arc::from([]),
3586 path_style,
3587 }
3588 }
3589
3590 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3591 proto::UpdateRepository {
3592 branch_summary: self.branch.as_ref().map(branch_to_proto),
3593 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3594 updated_statuses: self
3595 .statuses_by_path
3596 .iter()
3597 .map(|entry| entry.to_proto())
3598 .collect(),
3599 removed_statuses: Default::default(),
3600 current_merge_conflicts: self
3601 .merge
3602 .merge_heads_by_conflicted_path
3603 .iter()
3604 .map(|(repo_path, _)| repo_path.to_proto())
3605 .collect(),
3606 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3607 project_id,
3608 id: self.id.to_proto(),
3609 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3610 entry_ids: vec![self.id.to_proto()],
3611 scan_id: self.scan_id,
3612 is_last_update: true,
3613 stash_entries: self
3614 .stash_entries
3615 .entries
3616 .iter()
3617 .map(stash_to_proto)
3618 .collect(),
3619 remote_upstream_url: self.remote_upstream_url.clone(),
3620 remote_origin_url: self.remote_origin_url.clone(),
3621 original_repo_abs_path: Some(
3622 self.original_repo_abs_path.to_string_lossy().into_owned(),
3623 ),
3624 linked_worktrees: self
3625 .linked_worktrees
3626 .iter()
3627 .map(worktree_to_proto)
3628 .collect(),
3629 }
3630 }
3631
    /// Serializes the difference between `old` and `self` as an incremental
    /// `UpdateRepository` message: statuses new or changed in `self` go to
    /// `updated_statuses`, statuses present only in `old` go to
    /// `removed_statuses`. Relies on both status sequences being ordered by
    /// repo path for a sorted-merge walk.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        // Two-pointer merge over the two sorted status sequences.
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Path only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Path in both snapshots: emit only if something changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Path only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3712
    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    ///
    /// Determined by comparing the work directory against the recorded
    /// original repository path.
    pub fn is_main_worktree(&self) -> bool {
        self.work_directory_abs_path == self.original_repo_abs_path
    }
3723
    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules
    /// (the exact complement of [`Self::is_main_worktree`]).
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }
3731
    /// The git worktrees linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }
3735
    /// Iterates over all file status entries, cloned per entry.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }
3739
    /// The aggregated git status summary over all tracked paths.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }
3743
    /// Looks up the status entry recorded for a single repo-relative path.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
3749
    /// Looks up the diff statistics recorded for a single repo-relative path,
    /// if that path has both a status entry and a diff stat.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }
3755
    /// Converts an absolute path into a path relative to this repository's
    /// work directory; `None` if the path lies outside the work directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }
3759
    /// Converts a repo-relative path back into an absolute path under the
    /// work directory.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            // NOTE(review): assumes joining a relative repo path onto the work
            // directory cannot fail — confirm `PathStyle::join`'s contract.
            .unwrap()
            .into()
    }
3766
    /// Shared helper: strips `work_directory_abs_path` off `abs_path` using
    /// the given path style, yielding a repo-relative path, or `None` when
    /// `abs_path` is not under the work directory.
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }
3776
    /// Whether `repo_path` was recorded as conflicted the last time the merge
    /// heads changed (see `MergeDetails::update`).
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }
3782
3783 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3784 let had_conflict_on_last_merge_head_change = self
3785 .merge
3786 .merge_heads_by_conflicted_path
3787 .contains_key(repo_path);
3788 let has_conflict_currently = self
3789 .status_for_path(repo_path)
3790 .is_some_and(|entry| entry.status.is_conflicted());
3791 had_conflict_on_last_merge_head_change || has_conflict_currently
3792 }
3793
3794 /// This is the name that will be displayed in the repository selector for this repository.
3795 pub fn display_name(&self) -> SharedString {
3796 self.work_directory_abs_path
3797 .file_name()
3798 .unwrap_or_default()
3799 .to_string_lossy()
3800 .to_string()
3801 .into()
3802 }
3803}
3804
3805pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3806 proto::StashEntry {
3807 oid: entry.oid.as_bytes().to_vec(),
3808 message: entry.message.clone(),
3809 branch: entry.branch.clone(),
3810 index: entry.index as u64,
3811 timestamp: entry.timestamp,
3812 }
3813}
3814
3815pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3816 Ok(StashEntry {
3817 oid: Oid::from_bytes(&entry.oid)?,
3818 message: entry.message.clone(),
3819 index: entry.index as usize,
3820 branch: entry.branch.clone(),
3821 timestamp: entry.timestamp,
3822 })
3823}
3824
3825impl MergeDetails {
3826 async fn update(
3827 &mut self,
3828 backend: &Arc<dyn GitRepository>,
3829 current_conflicted_paths: Vec<RepoPath>,
3830 ) -> Result<bool> {
3831 log::debug!("load merge details");
3832 self.message = backend.merge_message().await.map(SharedString::from);
3833 let heads = backend
3834 .revparse_batch(vec![
3835 "MERGE_HEAD".into(),
3836 "CHERRY_PICK_HEAD".into(),
3837 "REBASE_HEAD".into(),
3838 "REVERT_HEAD".into(),
3839 "APPLY_HEAD".into(),
3840 ])
3841 .await
3842 .log_err()
3843 .unwrap_or_default()
3844 .into_iter()
3845 .map(|opt| opt.map(SharedString::from))
3846 .collect::<Vec<_>>();
3847
3848 let mut conflicts_changed = false;
3849
3850 // Record the merge state for newly conflicted paths
3851 for path in ¤t_conflicted_paths {
3852 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3853 conflicts_changed = true;
3854 self.merge_heads_by_conflicted_path
3855 .insert(path.clone(), heads.clone());
3856 }
3857 }
3858
3859 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3860 self.merge_heads_by_conflicted_path
3861 .retain(|path, old_merge_heads| {
3862 let keep = current_conflicted_paths.contains(path)
3863 || (old_merge_heads == &heads
3864 && old_merge_heads.iter().any(|head| head.is_some()));
3865 if !keep {
3866 conflicts_changed = true;
3867 }
3868 keep
3869 });
3870
3871 Ok(conflicts_changed)
3872 }
3873}
3874
3875impl Repository {
3876 pub fn is_trusted(&self) -> bool {
3877 match self.repository_state.peek() {
3878 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3879 _ => false,
3880 }
3881 }
3882
    /// Returns a clone of the current repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
3886
    /// Iterates over all pending git operations, cloned per entry.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
3890
    /// The aggregated summary across all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
3894
    /// Looks up the pending git operations recorded for a single repo path.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
3900
    /// Constructs a repository backed by a local git checkout. The backend is
    /// opened asynchronously; `repository_state` is a shared future that
    /// resolves once the local state is ready (or failed to open).
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Stringify the error so the shared future's output is cloneable.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Branch changes invalidate the cached commit-graph data, except during
        // the initial scan (scan_id <= 1).
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3968
    /// Constructs a repository proxied over RPC from a remote host; git jobs
    /// are forwarded to `client` under the given `project_id`.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike `local`, the remote state is available immediately.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4006
    /// Upgrades the weak handle to the owning `GitStore`, if it is still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4010
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, computes what changed, propagates the changes to
    /// each buffer's diff state, and mirrors them to any downstream client.
    /// Runs as a keyed job so concurrent reloads coalesce.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Gather, per open buffer in this repo, the current base texts
                // (only for the diff kinds that still have live observers).
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Off the main thread: load fresh base texts from git and diff
                // them against the texts currently held, producing the minimal
                // `DiffBasesChange` (if any) for each buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // Collapse into `SetBoth` when both
                                            // sides ended up identical.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Back on the main thread: apply each change to its diff state
                // and forward it to the downstream client, if any.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4170
    /// Enqueues an unkeyed job on this repository's serial git job queue.
    ///
    /// `status` is an optional human-readable message displayed while the job
    /// runs. The returned receiver resolves with the job's result, or is
    /// canceled if the queue is dropped before the job completes.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        // Delegate to the keyed variant with no key (i.e. no coalescing).
        self.send_keyed_job(None, status, job)
    }
4183
    /// Enqueues a job on the repository's git job queue, optionally tagged
    /// with a `GitJobKey` (used by the queue to relate jobs that write to the
    /// same state).
    ///
    /// While the job runs, an entry is kept in `active_jobs` (with the given
    /// `status` message and a start timestamp) so the UI can show progress;
    /// the entry is removed when the job finishes. The job's result is
    /// delivered through the returned oneshot receiver.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job as active only when it has a
                        // user-visible status message.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Always clear the active-job entry, even if the
                        // repository entity update above failed.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may have been dropped; that's fine.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4233
4234 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4235 let Some(git_store) = self.git_store.upgrade() else {
4236 return;
4237 };
4238 let entity = cx.entity();
4239 git_store.update(cx, |git_store, cx| {
4240 let Some((&id, _)) = git_store
4241 .repositories
4242 .iter()
4243 .find(|(_, handle)| *handle == &entity)
4244 else {
4245 return;
4246 };
4247 git_store.active_repo_id = Some(id);
4248 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4249 });
4250 }
4251
    /// Iterates over the cached (snapshot) status entries for this repository.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4255
    /// Returns the cached diff statistics for a repository-relative path, if
    /// the snapshot has any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4259
    /// Returns a clone of the cached stash entry list from the snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4263
4264 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4265 let git_store = self.git_store.upgrade()?;
4266 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4267 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4268 let abs_path = SanitizedPath::new(&abs_path);
4269 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4270 Some(ProjectPath {
4271 worktree_id: worktree.read(cx).id(),
4272 path: relative_path,
4273 })
4274 }
4275
4276 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4277 let git_store = self.git_store.upgrade()?;
4278 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4279 let abs_path = worktree_store.absolutize(path, cx)?;
4280 self.snapshot.abs_path_to_repo_path(&abs_path)
4281 }
4282
4283 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4284 other
4285 .read(cx)
4286 .snapshot
4287 .work_directory_abs_path
4288 .starts_with(&self.snapshot.work_directory_abs_path)
4289 }
4290
    /// Returns the buffer used for composing commit messages, creating it on
    /// first use and caching it in `commit_message_buffer`.
    ///
    /// For local repositories a fresh buffer is created locally; for remote
    /// repositories the buffer is requested from the host over RPC and then
    /// awaited in the local buffer store. If `languages` is provided, the
    /// "Git Commit" language is applied to the buffer.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the cached buffer if one was already opened.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait until the host has replicated the buffer to us.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache for subsequent calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4343
4344 fn open_local_commit_buffer(
4345 language_registry: Option<Arc<LanguageRegistry>>,
4346 buffer_store: Entity<BufferStore>,
4347 cx: &mut Context<Self>,
4348 ) -> Task<Result<Entity<Buffer>>> {
4349 cx.spawn(async move |repository, cx| {
4350 let git_commit_language = match language_registry {
4351 Some(language_registry) => {
4352 Some(language_registry.language_for_name("Git Commit").await?)
4353 }
4354 None => None,
4355 };
4356 let buffer = buffer_store
4357 .update(cx, |buffer_store, cx| {
4358 buffer_store.create_buffer(git_commit_language, false, cx)
4359 })
4360 .await?;
4361
4362 repository.update(cx, |repository, _| {
4363 repository.commit_message_buffer = Some(buffer.clone());
4364 })?;
4365 Ok(buffer)
4366 })
4367 }
4368
    /// Restores the given paths to their state at `commit`
    /// (`git checkout <commit> -- <paths>`), tracking the operation as a
    /// pending "Reverted" op per path.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                // Local: run the checkout directly via the backend.
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote: forward the request to the host over RPC.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4423
    /// Resets the repository to `commit` with the given mode
    /// (`git reset --soft|--mixed <commit>`).
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            // Translate the local enum into its proto counterpart.
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4457
    /// Loads the details (sha, message, author, timestamp) of a single commit.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4485
    /// Loads the full diff introduced by `commit` (per-file old/new text),
    /// either from the local backend or via RPC for remote repositories.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        // Fail the whole diff if any file path fails to decode.
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4521
    /// Loads the commit history for `path` without pagination (from the
    /// beginning, with no entry limit).
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4528
    /// Loads a page of the commit history for `path`, skipping the first
    /// `skip` entries and returning at most `limit` entries (all remaining
    /// when `limit` is `None`).
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4570
    /// Returns any graph data already cached for the given log source/order
    /// combination, without triggering a fetch (contrast with `graph_data`).
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4578
4579 pub fn search_commits(
4580 &mut self,
4581 log_source: LogSource,
4582 search_args: SearchCommitArgs,
4583 request_tx: smol::channel::Sender<Oid>,
4584 cx: &mut Context<Self>,
4585 ) {
4586 let repository_state = self.repository_state.clone();
4587
4588 cx.background_spawn(async move {
4589 let repo_state = repository_state.await;
4590
4591 match repo_state {
4592 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4593 backend
4594 .search_commits(log_source, search_args, request_tx)
4595 .await
4596 .log_err();
4597 }
4598 Ok(RepositoryState::Remote(_)) => {}
4599 Err(_) => {}
4600 };
4601 })
4602 .detach();
4603 }
4604
    /// Returns the slice of commit graph data covering `range` for the given
    /// log source/order, kicking off a background fetch on first access.
    ///
    /// The response's `is_loading` flag indicates whether the fetch task is
    /// still running; `error` carries a fetch failure, if any. The requested
    /// range is clamped to the data currently available.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // Spawned exactly once per (source, order) key; the task's
                // lifetime is tied to the map entry it populates.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record the failure on the entry so callers can surface it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the data fetched so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4674
    /// Streams initial commit graph data from the local backend into
    /// `initial_graph_data`, batch by batch, emitting a `GraphEvent` with the
    /// updated commit count after each batch.
    ///
    /// Runs the backend traversal on the background executor and applies each
    /// received batch on the entity; finishes when the backend closes the
    /// channel, then propagates any backend error.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches until the backend drops its sender.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        // Append commits and keep the oid -> index lookup in sync.
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                // The entry is created before this task is spawned, so a
                // vacant entry here indicates a lifecycle bug.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4728
    /// Returns the cached data for commit `sha`, requesting a load on a cache
    /// miss. While a load is pending (or the reader is still starting up)
    /// `CommitDataState::Loading` is returned.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark as loading if the request was actually queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    // Lazily (re)start the reader; this call's request is
                    // effectively dropped and retried on a later call.
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4749
    /// Starts the commit-data reader: a background task that services commit
    /// lookups from the local backend, plus a foreground task that stores
    /// results into `commit_data` and notifies observers.
    ///
    /// The background loop shuts itself down after 10 seconds of inactivity
    /// (or when either channel closes); the foreground task then flips the
    /// handler state back to `Closed` so a later request can restart it.
    /// Only supported for local repositories.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            // Apply results on the entity until the result channel closes.
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Mark the handler closed so the next request restarts it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: tear the reader down if no request arrives
                // within 10 seconds.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Leave the entry in Loading state; just log.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task finish and
            // flip the handler state back to Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4840
    /// Returns the project's buffer store, or `None` if the owning `GitStore`
    /// has been dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4844
    /// Kicks off saves for every open buffer among `entries` that still exists
    /// on disk and has unsaved edits, returning the save tasks so callers can
    /// await them before acting on the files.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    // Paths outside the project's worktrees are skipped.
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
4871
    /// Stages the given paths (`git add`), saving dirty buffers first.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4879
    /// Unstages the given paths (`git reset`), saving dirty buffers first.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4887
    /// Stages (`stage == true`) or unstages (`stage == false`) `entries`,
    /// first saving any dirty open buffers so the index write reflects what
    /// the user sees.
    ///
    /// Before the git command runs, every open uncommitted diff for the
    /// affected buffers is optimistically updated to show all hunks as
    /// (un)staged; on failure those pending hunks are cleared again. The
    /// operation is tracked as a pending op per path, and the index write is
    /// keyed by the path set so writes to the same entries are serialized.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable command line shown as the job's status message.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Persist unsaved edits before touching the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip all hunks in the open diffs,
                            // remembering each diff's operation count so we can
                            // later tell whether our write was the latest one.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Commit or roll back the optimistic hunk updates.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5072
5073 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5074 let snapshot = self.snapshot.clone();
5075 let pending_ops = self.pending_ops.clone();
5076 let to_stage = cx.background_spawn(async move {
5077 snapshot
5078 .status()
5079 .filter_map(|entry| {
5080 if let Some(ops) =
5081 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5082 {
5083 if ops.staging() || ops.staged() {
5084 None
5085 } else {
5086 Some(entry.repo_path)
5087 }
5088 } else if entry.status.staging().is_fully_staged() {
5089 None
5090 } else {
5091 Some(entry.repo_path)
5092 }
5093 })
5094 .collect()
5095 });
5096
5097 cx.spawn(async move |this, cx| {
5098 let to_stage = to_stage.await;
5099 this.update(cx, |this, cx| {
5100 this.stage_or_unstage_entries(true, to_stage, cx)
5101 })?
5102 .await
5103 })
5104 }
5105
5106 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5107 let snapshot = self.snapshot.clone();
5108 let pending_ops = self.pending_ops.clone();
5109 let to_unstage = cx.background_spawn(async move {
5110 snapshot
5111 .status()
5112 .filter_map(|entry| {
5113 if let Some(ops) =
5114 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5115 {
5116 if !ops.staging() && !ops.staged() {
5117 None
5118 } else {
5119 Some(entry.repo_path)
5120 }
5121 } else if entry.status.staging().is_fully_unstaged() {
5122 None
5123 } else {
5124 Some(entry.repo_path)
5125 }
5126 })
5127 .collect()
5128 });
5129
5130 cx.spawn(async move |this, cx| {
5131 let to_unstage = to_unstage.await;
5132 this.update(cx, |this, cx| {
5133 this.stage_or_unstage_entries(false, to_unstage, cx)
5134 })?
5135 .await
5136 })
5137 }
5138
    /// Stashes every path that currently has a cached status entry.
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5144
    /// Stashes the given paths (`git stash push -- <paths>` semantics),
    /// locally via the backend or over RPC for remote repositories.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5181
    /// Pops a stash entry (the latest when `index` is `None`), applying it
    /// and removing it from the stash.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5215
    /// Applies a stash entry (the latest when `index` is `None`) without
    /// removing it from the stash.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5249
5250 pub fn stash_drop(
5251 &mut self,
5252 index: Option<usize>,
5253 cx: &mut Context<Self>,
5254 ) -> oneshot::Receiver<anyhow::Result<()>> {
5255 let id = self.id;
5256 let updates_tx = self
5257 .git_store()
5258 .and_then(|git_store| match &git_store.read(cx).state {
5259 GitStoreState::Local { downstream, .. } => downstream
5260 .as_ref()
5261 .map(|downstream| downstream.updates_tx.clone()),
5262 _ => None,
5263 });
5264 let this = cx.weak_entity();
5265 self.send_job(None, move |git_repo, mut cx| async move {
5266 match git_repo {
5267 RepositoryState::Local(LocalRepositoryState {
5268 backend,
5269 environment,
5270 ..
5271 }) => {
5272 // TODO would be nice to not have to do this manually
5273 let result = backend.stash_drop(index, environment).await;
5274 if result.is_ok()
5275 && let Ok(stash_entries) = backend.stash_entries().await
5276 {
5277 let snapshot = this.update(&mut cx, |this, cx| {
5278 this.snapshot.stash_entries = stash_entries;
5279 cx.emit(RepositoryEvent::StashEntriesChanged);
5280 this.snapshot.clone()
5281 })?;
5282 if let Some(updates_tx) = updates_tx {
5283 updates_tx
5284 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5285 .ok();
5286 }
5287 }
5288
5289 result
5290 }
5291 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5292 client
5293 .request(proto::StashDrop {
5294 project_id: project_id.0,
5295 repository_id: id.to_proto(),
5296 stash_index: index.map(|i| i as u64),
5297 })
5298 .await
5299 .context("sending stash pop request")?;
5300 Ok(())
5301 }
5302 }
5303 })
5304 }
5305
    /// Runs a git hook (e.g. pre-commit), locally via the backend or over RPC
    /// for remote repositories.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5332
    /// Creates a commit with the given message and options, running the
    /// pre-commit hook first; the commit job fails if the hook fails.
    ///
    /// `name_and_email` optionally overrides the author identity. For remote
    /// repositories the askpass delegate is registered under a fresh id so
    /// the host can relay credential prompts back to this client; the
    /// registration is removed when the request finishes.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook ahead of the commit job itself.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Propagate both hook-channel cancellation and hook failure.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Ensure the delegate is deregistered even on error.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5387
    /// Fetches from the remote(s) described by `fetch_options`, prompting for
    /// credentials through `askpass` when required.
    ///
    /// Returns a receiver that resolves with the captured stdout/stderr of the
    /// fetch once it completes.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate so the host can route
                    // credential prompts back to this client; the defer removes
                    // it no matter how the request ends.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5429
5430 pub fn push(
5431 &mut self,
5432 branch: SharedString,
5433 remote_branch: SharedString,
5434 remote: SharedString,
5435 options: Option<PushOptions>,
5436 askpass: AskPassDelegate,
5437 cx: &mut Context<Self>,
5438 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5439 let askpass_delegates = self.askpass_delegates.clone();
5440 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5441 let id = self.id;
5442
5443 let args = options
5444 .map(|option| match option {
5445 PushOptions::SetUpstream => " --set-upstream",
5446 PushOptions::Force => " --force-with-lease",
5447 })
5448 .unwrap_or("");
5449
5450 let updates_tx = self
5451 .git_store()
5452 .and_then(|git_store| match &git_store.read(cx).state {
5453 GitStoreState::Local { downstream, .. } => downstream
5454 .as_ref()
5455 .map(|downstream| downstream.updates_tx.clone()),
5456 _ => None,
5457 });
5458
5459 let this = cx.weak_entity();
5460 self.send_job(
5461 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5462 move |git_repo, mut cx| async move {
5463 match git_repo {
5464 RepositoryState::Local(LocalRepositoryState {
5465 backend,
5466 environment,
5467 ..
5468 }) => {
5469 let result = backend
5470 .push(
5471 branch.to_string(),
5472 remote_branch.to_string(),
5473 remote.to_string(),
5474 options,
5475 askpass,
5476 environment.clone(),
5477 cx.clone(),
5478 )
5479 .await;
5480 // TODO would be nice to not have to do this manually
5481 if result.is_ok() {
5482 let branches = backend.branches().await?;
5483 let branch = branches.into_iter().find(|branch| branch.is_head);
5484 log::info!("head branch after scan is {branch:?}");
5485 let snapshot = this.update(&mut cx, |this, cx| {
5486 this.snapshot.branch = branch;
5487 cx.emit(RepositoryEvent::BranchChanged);
5488 this.snapshot.clone()
5489 })?;
5490 if let Some(updates_tx) = updates_tx {
5491 updates_tx
5492 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5493 .ok();
5494 }
5495 }
5496 result
5497 }
5498 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5499 askpass_delegates.lock().insert(askpass_id, askpass);
5500 let _defer = util::defer(|| {
5501 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5502 debug_assert!(askpass_delegate.is_some());
5503 });
5504 let response = client
5505 .request(proto::Push {
5506 project_id: project_id.0,
5507 repository_id: id.to_proto(),
5508 askpass_id,
5509 branch_name: branch.to_string(),
5510 remote_branch_name: remote_branch.to_string(),
5511 remote_name: remote.to_string(),
5512 options: options.map(|options| match options {
5513 PushOptions::Force => proto::push::PushOptions::Force,
5514 PushOptions::SetUpstream => {
5515 proto::push::PushOptions::SetUpstream
5516 }
5517 }
5518 as i32),
5519 })
5520 .await?;
5521
5522 Ok(RemoteCommandOutput {
5523 stdout: response.stdout,
5524 stderr: response.stderr,
5525 })
5526 }
5527 }
5528 },
5529 )
5530 }
5531
    /// Pulls from `remote` (optionally a specific `branch`), rebasing instead
    /// of merging when `rebase` is set.
    ///
    /// Credentials are requested through `askpass` when needed. The receiver
    /// resolves with the captured stdout/stderr of the pull.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build a status label mirroring the actual command, e.g.
        // "git pull --rebase origin main".
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate so the host can route
                    // credential prompts back to this client; the defer removes
                    // it no matter how the request ends.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5596
    /// Writes `content` into the git index entry for `path` (used when
    /// staging/unstaging); a `None` content presumably clears the entry —
    /// TODO confirm against the backend's `set_index_text` contract.
    ///
    /// The job is keyed on `WriteIndex(path)` so writes to the same path are
    /// serialized and cannot interleave. When `hunk_staging_operation_count`
    /// is provided, it is recorded on the buffer's diff state after the write
    /// so hunk-staging bookkeeping stays consistent with what was written.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit of the on-disk file in
                        // the index entry; missing files or metadata errors
                        // fall back to non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // The executable bit is not sent here — presumably the
                        // host determines it from its own filesystem; confirm.
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                // Record the operation count on the buffer's diff state so the
                // diff logic knows which hunk-staging operations this index
                // write reflects.
                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5675
5676 pub fn create_remote(
5677 &mut self,
5678 remote_name: String,
5679 remote_url: String,
5680 ) -> oneshot::Receiver<Result<()>> {
5681 let id = self.id;
5682 self.send_job(
5683 Some(format!("git remote add {remote_name} {remote_url}").into()),
5684 move |repo, _cx| async move {
5685 match repo {
5686 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5687 backend.create_remote(remote_name, remote_url).await
5688 }
5689 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5690 client
5691 .request(proto::GitCreateRemote {
5692 project_id: project_id.0,
5693 repository_id: id.to_proto(),
5694 remote_name,
5695 remote_url,
5696 })
5697 .await?;
5698
5699 Ok(())
5700 }
5701 }
5702 },
5703 )
5704 }
5705
5706 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5707 let id = self.id;
5708 self.send_job(
5709 Some(format!("git remove remote {remote_name}").into()),
5710 move |repo, _cx| async move {
5711 match repo {
5712 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5713 backend.remove_remote(remote_name).await
5714 }
5715 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5716 client
5717 .request(proto::GitRemoveRemote {
5718 project_id: project_id.0,
5719 repository_id: id.to_proto(),
5720 remote_name,
5721 })
5722 .await?;
5723
5724 Ok(())
5725 }
5726 }
5727 },
5728 )
5729 }
5730
    /// Resolves the remote(s) relevant to `branch_name`.
    ///
    /// If a branch name is given, first tries that branch's configured push
    /// remote (when `is_push`) or upstream remote; when no branch-specific
    /// remote is configured (or no branch is given), falls back to listing
    /// all remotes of the repository.
    pub fn get_remotes(
        &mut self,
        branch_name: Option<String>,
        is_push: bool,
    ) -> oneshot::Receiver<Result<Vec<Remote>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let remote = if let Some(branch_name) = branch_name {
                        if is_push {
                            backend.get_push_remote(branch_name).await?
                        } else {
                            backend.get_branch_remote(branch_name).await?
                        }
                    } else {
                        None
                    };

                    // A configured branch-specific remote wins; otherwise
                    // offer every remote so the caller can choose.
                    match remote {
                        Some(remote) => Ok(vec![remote]),
                        None => backend.get_all_remotes().await,
                    }
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // The host applies the same fallback logic server-side.
                    let response = client
                        .request(proto::GetRemotes {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                            is_push,
                        })
                        .await?;

                    let remotes = response
                        .remotes
                        .into_iter()
                        .map(|remotes| Remote {
                            name: remotes.name.into(),
                        })
                        .collect();

                    Ok(remotes)
                }
            }
        })
    }
5778
5779 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5780 let id = self.id;
5781 self.send_job(None, move |repo, _| async move {
5782 match repo {
5783 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5784 backend.branches().await
5785 }
5786 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5787 let response = client
5788 .request(proto::GitGetBranches {
5789 project_id: project_id.0,
5790 repository_id: id.to_proto(),
5791 })
5792 .await?;
5793
5794 let branches = response
5795 .branches
5796 .into_iter()
5797 .map(|branch| proto_to_branch(&branch))
5798 .collect();
5799
5800 Ok(branches)
5801 }
5802 }
5803 })
5804 }
5805
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree's working directory differs from the repository's
        // original checkout path; equality means we *are* the main checkout.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5817
5818 pub fn path_for_new_linked_worktree(
5819 &self,
5820 branch_name: &str,
5821 worktree_directory_setting: &str,
5822 ) -> Result<PathBuf> {
5823 let original_repo = self.original_repo_abs_path.clone();
5824 let project_name = original_repo
5825 .file_name()
5826 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5827 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5828 Ok(directory.join(branch_name).join(project_name))
5829 }
5830
5831 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5832 let id = self.id;
5833 self.send_job(None, move |repo, _| async move {
5834 match repo {
5835 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5836 backend.worktrees().await
5837 }
5838 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5839 let response = client
5840 .request(proto::GitGetWorktrees {
5841 project_id: project_id.0,
5842 repository_id: id.to_proto(),
5843 })
5844 .await?;
5845
5846 let worktrees = response
5847 .worktrees
5848 .into_iter()
5849 .map(|worktree| proto_to_worktree(&worktree))
5850 .collect();
5851
5852 Ok(worktrees)
5853 }
5854 }
5855 })
5856 }
5857
5858 pub fn create_worktree(
5859 &mut self,
5860 branch_name: String,
5861 path: PathBuf,
5862 commit: Option<String>,
5863 ) -> oneshot::Receiver<Result<()>> {
5864 let id = self.id;
5865 self.send_job(
5866 Some(format!("git worktree add: {}", branch_name).into()),
5867 move |repo, _cx| async move {
5868 match repo {
5869 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5870 backend.create_worktree(branch_name, path, commit).await
5871 }
5872 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5873 client
5874 .request(proto::GitCreateWorktree {
5875 project_id: project_id.0,
5876 repository_id: id.to_proto(),
5877 name: branch_name,
5878 directory: path.to_string_lossy().to_string(),
5879 commit,
5880 })
5881 .await?;
5882
5883 Ok(())
5884 }
5885 }
5886 },
5887 )
5888 }
5889
5890 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5891 let id = self.id;
5892 self.send_job(
5893 Some(format!("git worktree remove: {}", path.display()).into()),
5894 move |repo, _cx| async move {
5895 match repo {
5896 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5897 backend.remove_worktree(path, force).await
5898 }
5899 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5900 client
5901 .request(proto::GitRemoveWorktree {
5902 project_id: project_id.0,
5903 repository_id: id.to_proto(),
5904 path: path.to_string_lossy().to_string(),
5905 force,
5906 })
5907 .await?;
5908
5909 Ok(())
5910 }
5911 }
5912 },
5913 )
5914 }
5915
5916 pub fn rename_worktree(
5917 &mut self,
5918 old_path: PathBuf,
5919 new_path: PathBuf,
5920 ) -> oneshot::Receiver<Result<()>> {
5921 let id = self.id;
5922 self.send_job(
5923 Some(format!("git worktree move: {}", old_path.display()).into()),
5924 move |repo, _cx| async move {
5925 match repo {
5926 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5927 backend.rename_worktree(old_path, new_path).await
5928 }
5929 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5930 client
5931 .request(proto::GitRenameWorktree {
5932 project_id: project_id.0,
5933 repository_id: id.to_proto(),
5934 old_path: old_path.to_string_lossy().to_string(),
5935 new_path: new_path.to_string_lossy().to_string(),
5936 })
5937 .await?;
5938
5939 Ok(())
5940 }
5941 }
5942 },
5943 )
5944 }
5945
5946 pub fn default_branch(
5947 &mut self,
5948 include_remote_name: bool,
5949 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5950 let id = self.id;
5951 self.send_job(None, move |repo, _| async move {
5952 match repo {
5953 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5954 backend.default_branch(include_remote_name).await
5955 }
5956 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5957 let response = client
5958 .request(proto::GetDefaultBranch {
5959 project_id: project_id.0,
5960 repository_id: id.to_proto(),
5961 })
5962 .await?;
5963
5964 anyhow::Ok(response.branch.map(SharedString::from))
5965 }
5966 }
5967 })
5968 }
5969
    /// Computes a tree-level diff (per-file added/modified/deleted statuses)
    /// for the given `diff_type`.
    ///
    /// For remote repositories, entries whose oid or path fail to decode are
    /// logged and silently dropped rather than failing the whole diff.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Map proto statuses to TreeDiffStatus. Modified/Deleted
                    // carry the old blob oid; Added has none.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6029
    /// Produces a textual (patch) diff of the requested kind: HEAD→index,
    /// HEAD→worktree, or against a merge base.
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Translate DiffType to its proto representation; only the
                    // merge-base variant carries an extra ref.
                    let (proto_diff_type, merge_base_ref) = match &diff_type {
                        DiffType::HeadToIndex => {
                            (proto::git_diff::DiffType::HeadToIndex.into(), None)
                        }
                        DiffType::HeadToWorktree => {
                            (proto::git_diff::DiffType::HeadToWorktree.into(), None)
                        }
                        DiffType::MergeBase { base_ref } => (
                            proto::git_diff::DiffType::MergeBase.into(),
                            Some(base_ref.to_string()),
                        ),
                    };
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            diff_type: proto_diff_type,
                            merge_base_ref,
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
6064
    /// Creates (and switches to) a new branch, optionally based on
    /// `base_branch`.
    ///
    /// NOTE(review): the remote RPC request has no field for `base_branch`,
    /// so for remote repositories the base is silently ignored and the branch
    /// is created from the current HEAD — confirm whether the proto needs
    /// extending.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6095
6096 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6097 let id = self.id;
6098 self.send_job(
6099 Some(format!("git switch {branch_name}").into()),
6100 move |repo, _cx| async move {
6101 match repo {
6102 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6103 backend.change_branch(branch_name).await
6104 }
6105 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6106 client
6107 .request(proto::GitChangeBranch {
6108 project_id: project_id.0,
6109 repository_id: id.to_proto(),
6110 branch_name,
6111 })
6112 .await?;
6113
6114 Ok(())
6115 }
6116 }
6117 },
6118 )
6119 }
6120
6121 pub fn delete_branch(
6122 &mut self,
6123 is_remote: bool,
6124 branch_name: String,
6125 ) -> oneshot::Receiver<Result<()>> {
6126 let id = self.id;
6127 self.send_job(
6128 Some(
6129 format!(
6130 "git branch {} {}",
6131 if is_remote { "-dr" } else { "-d" },
6132 branch_name
6133 )
6134 .into(),
6135 ),
6136 move |repo, _cx| async move {
6137 match repo {
6138 RepositoryState::Local(state) => {
6139 state.backend.delete_branch(is_remote, branch_name).await
6140 }
6141 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6142 client
6143 .request(proto::GitDeleteBranch {
6144 project_id: project_id.0,
6145 repository_id: id.to_proto(),
6146 is_remote,
6147 branch_name,
6148 })
6149 .await?;
6150
6151 Ok(())
6152 }
6153 }
6154 },
6155 )
6156 }
6157
6158 pub fn rename_branch(
6159 &mut self,
6160 branch: String,
6161 new_name: String,
6162 ) -> oneshot::Receiver<Result<()>> {
6163 let id = self.id;
6164 self.send_job(
6165 Some(format!("git branch -m {branch} {new_name}").into()),
6166 move |repo, _cx| async move {
6167 match repo {
6168 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6169 backend.rename_branch(branch, new_name).await
6170 }
6171 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6172 client
6173 .request(proto::GitRenameBranch {
6174 project_id: project_id.0,
6175 repository_id: id.to_proto(),
6176 branch,
6177 new_name,
6178 })
6179 .await?;
6180
6181 Ok(())
6182 }
6183 }
6184 },
6185 )
6186 }
6187
6188 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6189 let id = self.id;
6190 self.send_job(None, move |repo, _cx| async move {
6191 match repo {
6192 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6193 backend.check_for_pushed_commit().await
6194 }
6195 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6196 let response = client
6197 .request(proto::CheckForPushedCommits {
6198 project_id: project_id.0,
6199 repository_id: id.to_proto(),
6200 })
6201 .await?;
6202
6203 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6204
6205 Ok(branches)
6206 }
6207 }
6208 })
6209 }
6210
6211 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6212 self.send_job(None, |repo, _cx| async move {
6213 match repo {
6214 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6215 backend.checkpoint().await
6216 }
6217 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6218 }
6219 })
6220 }
6221
6222 pub fn restore_checkpoint(
6223 &mut self,
6224 checkpoint: GitRepositoryCheckpoint,
6225 ) -> oneshot::Receiver<Result<()>> {
6226 self.send_job(None, move |repo, _cx| async move {
6227 match repo {
6228 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6229 backend.restore_checkpoint(checkpoint).await
6230 }
6231 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6232 }
6233 })
6234 }
6235
    /// Applies a repository-state update received from the upstream host,
    /// mutating the local snapshot and emitting change events for each part
    /// that actually changed (branch, stash, worktrees, statuses, …).
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch / head commit: compare before overwriting so we only emit
        // BranchChanged on an actual change.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to decode are dropped silently.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Apply incremental status edits: removals first, then inserts;
        // entries that fail to decode are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only commit the scan id once the final chunk of a multi-part
        // update has been applied.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6319
6320 pub fn compare_checkpoints(
6321 &mut self,
6322 left: GitRepositoryCheckpoint,
6323 right: GitRepositoryCheckpoint,
6324 ) -> oneshot::Receiver<Result<bool>> {
6325 self.send_job(None, move |repo, _cx| async move {
6326 match repo {
6327 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6328 backend.compare_checkpoints(left, right).await
6329 }
6330 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6331 }
6332 })
6333 }
6334
6335 pub fn diff_checkpoints(
6336 &mut self,
6337 base_checkpoint: GitRepositoryCheckpoint,
6338 target_checkpoint: GitRepositoryCheckpoint,
6339 ) -> oneshot::Receiver<Result<String>> {
6340 self.send_job(None, move |repo, _cx| async move {
6341 match repo {
6342 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6343 backend
6344 .diff_checkpoints(base_checkpoint, target_checkpoint)
6345 .await
6346 }
6347 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6348 }
6349 })
6350 }
6351
    /// Prunes finished pending ops, keeping only paths that still have at
    /// least one op whose `running()` is true.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                // Keep only the still-running ops for this path; drop the
                // whole entry when none remain.
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): this emits the *previous* (pre-cleanup) op set,
            // not the filtered `updated` one — confirm listeners expect the
            // old state rather than the new one.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6377
    /// Schedules a full git status rescan as a keyed job; queued duplicate
    /// scans behind a newer one with the same key are skipped by the worker.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // Repository entity already dropped — nothing to scan.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // NOTE(review): the Result of `update` is discarded — confirm
                // that silently skipping the pending-ops cleanup when the app
                // context is gone is intentional.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                // Forward the refreshed snapshot to downstream collaborators.
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6409
    /// Spawns the worker task that executes git jobs for a local repository,
    /// returning the channel on which jobs are submitted.
    ///
    /// Jobs run sequentially. Before running a keyed job, the worker drains
    /// the channel and skips the job if a newer job with the same key is
    /// already queued, so stale keyed work (e.g. superseded rescans) is
    /// coalesced into the latest request.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // processing any jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so we can look ahead for
                // duplicate keys before running the next job.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a later queued job shares its key.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed: all senders dropped, shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6455
    /// Spawns the worker task that executes git jobs for a remote repository,
    /// returning the channel on which jobs are submitted.
    ///
    /// NOTE(review): the job loop is a duplicate of `spawn_local_git_worker`'s
    /// (minus backend initialization) — consider extracting the shared loop.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so we can look ahead for
                // duplicate keys before running the next job.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a later queued job shares its key.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed: all senders dropped, shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6491
    /// Loads the staged (index) text for `repo_path`, or `None` when the path
    /// has no index entry.
    ///
    /// For remote repositories this goes through `OpenUnstagedDiff`, whose
    /// response carries the staged text as the diff base.
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        // Flatten the job receiver: an outer error means the job was dropped.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6516
6517 fn load_committed_text(
6518 &mut self,
6519 buffer_id: BufferId,
6520 repo_path: RepoPath,
6521 cx: &App,
6522 ) -> Task<Result<DiffBasesChange>> {
6523 let rx = self.send_job(None, move |state, _| async move {
6524 match state {
6525 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6526 let committed_text = backend.load_committed_text(repo_path.clone()).await;
6527 let staged_text = backend.load_index_text(repo_path).await;
6528 let diff_bases_change = if committed_text == staged_text {
6529 DiffBasesChange::SetBoth(committed_text)
6530 } else {
6531 DiffBasesChange::SetEach {
6532 index: staged_text,
6533 head: committed_text,
6534 }
6535 };
6536 anyhow::Ok(diff_bases_change)
6537 }
6538 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6539 use proto::open_uncommitted_diff_response::Mode;
6540
6541 let response = client
6542 .request(proto::OpenUncommittedDiff {
6543 project_id: project_id.to_proto(),
6544 buffer_id: buffer_id.to_proto(),
6545 })
6546 .await?;
6547 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
6548 let bases = match mode {
6549 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
6550 Mode::IndexAndHead => DiffBasesChange::SetEach {
6551 head: response.committed_text,
6552 index: response.staged_text,
6553 },
6554 };
6555 Ok(bases)
6556 }
6557 }
6558 });
6559
6560 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6561 }
6562
6563 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6564 let repository_id = self.snapshot.id;
6565 let rx = self.send_job(None, move |state, _| async move {
6566 match state {
6567 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6568 backend.load_blob_content(oid).await
6569 }
6570 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6571 let response = client
6572 .request(proto::GetBlobContent {
6573 project_id: project_id.to_proto(),
6574 repository_id: repository_id.0,
6575 oid: oid.to_string(),
6576 })
6577 .await?;
6578 Ok(response.content)
6579 }
6580 }
6581 });
6582 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6583 }
6584
    /// Records that `paths` may have changed on disk and schedules a keyed
    /// background job to refresh their git statuses, diff stats, and the
    /// stash list.
    ///
    /// Batches accumulate in `paths_needing_status_update` until the
    /// `RefreshStatuses` job runs; since keyed jobs are deduplicated in the
    /// worker queue, bursts of changes collapse into a single refresh. If
    /// `updates_tx` is provided, the refreshed snapshot is forwarded
    /// downstream after statuses are updated.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of every batch queued since the last refresh.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                // Status refreshes only run against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                // Diff stats need a HEAD commit to diff against.
                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten the accumulated batches, deduplicating paths.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        // With no HEAD there is nothing to diff against, so
                        // substitute an empty stat set.
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        // Build edits only for paths whose status or diff stat
                        // actually differs from the previous snapshot.
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            // Skip entries unchanged relative to the previous
                            // snapshot.
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths that were reported changed but no longer appear
                        // in the status output have become clean: remove their
                        // old entries.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    // Forward the refreshed snapshot to downstream listeners.
                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6697
6698 /// currently running git command and when it started
6699 pub fn current_job(&self) -> Option<JobInfo> {
6700 self.active_jobs.values().next().cloned()
6701 }
6702
6703 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
6704 self.send_job(None, |_, _| async {})
6705 }
6706
    /// Runs `f` while tracking a pending operation with `git_status` for each
    /// of `paths`, so in-flight git state can be observed per path.
    ///
    /// When `f` resolves, each recorded op is marked `Finished` on success,
    /// `Skipped` if the job was canceled (a `Canceled` error is not treated
    /// as a failure), or `Error` otherwise.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        // Register a Running op per path before the work begins.
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                // Cancellation is reported as Skipped, not surfaced as an
                // error to the caller.
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Re-read each path's op list and update only the op created
            // above, leaving any concurrent ops untouched.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6746
6747 fn new_pending_ops_for_paths(
6748 &mut self,
6749 paths: Vec<RepoPath>,
6750 git_status: pending_op::GitStatus,
6751 ) -> Vec<(PendingOpId, RepoPath)> {
6752 let mut edits = Vec::with_capacity(paths.len());
6753 let mut ids = Vec::with_capacity(paths.len());
6754 for path in paths {
6755 let mut ops = self
6756 .pending_ops
6757 .get(&PathKey(path.as_ref().clone()), ())
6758 .cloned()
6759 .unwrap_or_else(|| PendingOps::new(&path));
6760 let id = ops.max_id() + 1;
6761 ops.ops.push(PendingOp {
6762 id,
6763 git_status,
6764 job_status: pending_op::JobStatus::Running,
6765 });
6766 edits.push(sum_tree::Edit::Insert(ops));
6767 ids.push((id, path));
6768 }
6769 self.pending_ops.edit(edits, ());
6770 ids
6771 }
6772 pub fn default_remote_url(&self) -> Option<String> {
6773 self.remote_upstream_url
6774 .clone()
6775 .or(self.remote_origin_url.clone())
6776 }
6777}
6778
6779/// If `path` is a git linked worktree checkout, resolves it to the main
6780/// repository's working directory path. Returns `None` if `path` is a normal
6781/// repository, not a git repo, or if resolution fails.
6782///
6783/// Resolution works by:
6784/// 1. Reading the `.git` file to get the `gitdir:` pointer
6785/// 2. Following that to the worktree-specific git directory
6786/// 3. Reading the `commondir` file to find the shared `.git` directory
6787/// 4. Deriving the main repo's working directory from the common dir
6788pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
6789 let dot_git = path.join(".git");
6790 let metadata = fs.metadata(&dot_git).await.ok()??;
6791 if metadata.is_dir {
6792 return None; // Normal repo, not a linked worktree
6793 }
6794 // It's a .git file — parse the gitdir: pointer
6795 let content = fs.load(&dot_git).await.ok()?;
6796 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
6797 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
6798 // Read commondir to find the main .git directory
6799 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
6800 let common_dir = fs
6801 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
6802 .await
6803 .ok()?;
6804 Some(git::repository::original_repo_path_from_common_dir(
6805 &common_dir,
6806 ))
6807}
6808
/// Validates that the resolved worktree directory is acceptable:
/// - The setting must not be an absolute path.
/// - The resolved path must be either a subdirectory of the working
///   directory or a subdirectory of its parent (i.e., a sibling).
///
/// Returns `Ok(resolved_path)` or an error with a user-facing message.
pub fn worktrees_directory_for_repo(
    original_repo_abs_path: &Path,
    worktree_directory_setting: &str,
) -> Result<PathBuf> {
    // Check the original setting before trimming, since a path like "///"
    // is absolute but becomes "" after stripping trailing separators.
    // Also check for leading `/` or `\` explicitly, because on Windows
    // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
    // would slip through even though it's clearly not a relative path.
    if Path::new(worktree_directory_setting).is_absolute()
        || worktree_directory_setting.starts_with('/')
        || worktree_directory_setting.starts_with('\\')
    {
        anyhow::bail!(
            "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
        );
    }

    if worktree_directory_setting.is_empty() {
        anyhow::bail!("git.worktree_directory must not be empty");
    }

    // Trailing separators don't change the target directory; drop them so
    // the ".." check below can't be bypassed with "../".
    let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
    if trimmed == ".." {
        anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
    }

    // Resolve the setting relative to the repository root, collapsing "." and
    // ".." components lexically.
    let joined = original_repo_abs_path.join(trimmed);
    let resolved = util::normalize_path(&joined);
    // If the setting escapes the repository (e.g. "../worktrees"), nest a
    // directory named after the repository inside it — presumably so worktree
    // directories of different repos don't collide.
    let resolved = if resolved.starts_with(original_repo_abs_path) {
        resolved
    } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
        resolved.join(repo_dir_name)
    } else {
        resolved
    };

    let parent = original_repo_abs_path
        .parent()
        .unwrap_or(original_repo_abs_path);

    // Final containment check: the directory must live under the repository
    // or under the repository's parent.
    if !resolved.starts_with(parent) {
        anyhow::bail!(
            "git.worktree_directory resolved to {resolved:?}, which is outside \
            the project root and its parent directory. It must resolve to a \
            subdirectory of {original_repo_abs_path:?} or a sibling of it."
        );
    }

    Ok(resolved)
}
6866
6867/// Returns a short name for a linked worktree suitable for UI display
6868///
6869/// Uses the main worktree path to come up with a short name that disambiguates
6870/// the linked worktree from the main worktree.
6871pub fn linked_worktree_short_name(
6872 main_worktree_path: &Path,
6873 linked_worktree_path: &Path,
6874) -> Option<SharedString> {
6875 if main_worktree_path == linked_worktree_path {
6876 return None;
6877 }
6878
6879 let project_name = main_worktree_path.file_name()?.to_str()?;
6880 let directory_name = linked_worktree_path.file_name()?.to_str()?;
6881 let name = if directory_name != project_name {
6882 directory_name.to_string()
6883 } else {
6884 linked_worktree_path
6885 .parent()?
6886 .file_name()?
6887 .to_str()?
6888 .to_string()
6889 };
6890 Some(name.into())
6891}
6892
6893fn get_permalink_in_rust_registry_src(
6894 provider_registry: Arc<GitHostingProviderRegistry>,
6895 path: PathBuf,
6896 selection: Range<u32>,
6897) -> Result<url::Url> {
6898 #[derive(Deserialize)]
6899 struct CargoVcsGit {
6900 sha1: String,
6901 }
6902
6903 #[derive(Deserialize)]
6904 struct CargoVcsInfo {
6905 git: CargoVcsGit,
6906 path_in_vcs: String,
6907 }
6908
6909 #[derive(Deserialize)]
6910 struct CargoPackage {
6911 repository: String,
6912 }
6913
6914 #[derive(Deserialize)]
6915 struct CargoToml {
6916 package: CargoPackage,
6917 }
6918
6919 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
6920 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
6921 Some((dir, json))
6922 }) else {
6923 bail!("No .cargo_vcs_info.json found in parent directories")
6924 };
6925 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
6926 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
6927 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
6928 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
6929 .context("parsing package.repository field of manifest")?;
6930 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
6931 let permalink = provider.build_permalink(
6932 remote,
6933 BuildPermalinkParams::new(
6934 &cargo_vcs_info.git.sha1,
6935 &RepoPath::from_rel_path(
6936 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6937 ),
6938 Some(selection),
6939 ),
6940 );
6941 Ok(permalink)
6942}
6943
6944fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6945 let Some(blame) = blame else {
6946 return proto::BlameBufferResponse {
6947 blame_response: None,
6948 };
6949 };
6950
6951 let entries = blame
6952 .entries
6953 .into_iter()
6954 .map(|entry| proto::BlameEntry {
6955 sha: entry.sha.as_bytes().into(),
6956 start_line: entry.range.start,
6957 end_line: entry.range.end,
6958 original_line_number: entry.original_line_number,
6959 author: entry.author,
6960 author_mail: entry.author_mail,
6961 author_time: entry.author_time,
6962 author_tz: entry.author_tz,
6963 committer: entry.committer_name,
6964 committer_mail: entry.committer_email,
6965 committer_time: entry.committer_time,
6966 committer_tz: entry.committer_tz,
6967 summary: entry.summary,
6968 previous: entry.previous,
6969 filename: entry.filename,
6970 })
6971 .collect::<Vec<_>>();
6972
6973 let messages = blame
6974 .messages
6975 .into_iter()
6976 .map(|(oid, message)| proto::CommitMessage {
6977 oid: oid.as_bytes().into(),
6978 message,
6979 })
6980 .collect::<Vec<_>>();
6981
6982 proto::BlameBufferResponse {
6983 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6984 }
6985}
6986
6987fn deserialize_blame_buffer_response(
6988 response: proto::BlameBufferResponse,
6989) -> Option<git::blame::Blame> {
6990 let response = response.blame_response?;
6991 let entries = response
6992 .entries
6993 .into_iter()
6994 .filter_map(|entry| {
6995 Some(git::blame::BlameEntry {
6996 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6997 range: entry.start_line..entry.end_line,
6998 original_line_number: entry.original_line_number,
6999 committer_name: entry.committer,
7000 committer_time: entry.committer_time,
7001 committer_tz: entry.committer_tz,
7002 committer_email: entry.committer_mail,
7003 author: entry.author,
7004 author_mail: entry.author_mail,
7005 author_time: entry.author_time,
7006 author_tz: entry.author_tz,
7007 summary: entry.summary,
7008 previous: entry.previous,
7009 filename: entry.filename,
7010 })
7011 })
7012 .collect::<Vec<_>>();
7013
7014 let messages = response
7015 .messages
7016 .into_iter()
7017 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7018 .collect::<HashMap<_, _>>();
7019
7020 Some(Blame { entries, messages })
7021}
7022
7023fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7024 proto::Branch {
7025 is_head: branch.is_head,
7026 ref_name: branch.ref_name.to_string(),
7027 unix_timestamp: branch
7028 .most_recent_commit
7029 .as_ref()
7030 .map(|commit| commit.commit_timestamp as u64),
7031 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7032 ref_name: upstream.ref_name.to_string(),
7033 tracking: upstream
7034 .tracking
7035 .status()
7036 .map(|upstream| proto::UpstreamTracking {
7037 ahead: upstream.ahead as u64,
7038 behind: upstream.behind as u64,
7039 }),
7040 }),
7041 most_recent_commit: branch
7042 .most_recent_commit
7043 .as_ref()
7044 .map(|commit| proto::CommitSummary {
7045 sha: commit.sha.to_string(),
7046 subject: commit.subject.to_string(),
7047 commit_timestamp: commit.commit_timestamp,
7048 author_name: commit.author_name.to_string(),
7049 }),
7050 }
7051}
7052
7053fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7054 proto::Worktree {
7055 path: worktree.path.to_string_lossy().to_string(),
7056 ref_name: worktree
7057 .ref_name
7058 .as_ref()
7059 .map(|s| s.to_string())
7060 .unwrap_or_default(),
7061 sha: worktree.sha.to_string(),
7062 is_main: worktree.is_main,
7063 }
7064}
7065
7066fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7067 git::repository::Worktree {
7068 path: PathBuf::from(proto.path.clone()),
7069 ref_name: Some(SharedString::from(&proto.ref_name)),
7070 sha: proto.sha.clone().into(),
7071 is_main: proto.is_main,
7072 }
7073}
7074
7075fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7076 git::repository::Branch {
7077 is_head: proto.is_head,
7078 ref_name: proto.ref_name.clone().into(),
7079 upstream: proto
7080 .upstream
7081 .as_ref()
7082 .map(|upstream| git::repository::Upstream {
7083 ref_name: upstream.ref_name.to_string().into(),
7084 tracking: upstream
7085 .tracking
7086 .as_ref()
7087 .map(|tracking| {
7088 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7089 ahead: tracking.ahead as u32,
7090 behind: tracking.behind as u32,
7091 })
7092 })
7093 .unwrap_or(git::repository::UpstreamTracking::Gone),
7094 }),
7095 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7096 git::repository::CommitSummary {
7097 sha: commit.sha.to_string().into(),
7098 subject: commit.subject.to_string().into(),
7099 commit_timestamp: commit.commit_timestamp,
7100 author_name: commit.author_name.to_string().into(),
7101 has_parent: true,
7102 }
7103 }),
7104 }
7105}
7106
7107fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7108 proto::GitCommitDetails {
7109 sha: commit.sha.to_string(),
7110 message: commit.message.to_string(),
7111 commit_timestamp: commit.commit_timestamp,
7112 author_email: commit.author_email.to_string(),
7113 author_name: commit.author_name.to_string(),
7114 }
7115}
7116
7117fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7118 CommitDetails {
7119 sha: proto.sha.clone().into(),
7120 message: proto.message.clone().into(),
7121 commit_timestamp: proto.commit_timestamp,
7122 author_email: proto.author_email.clone().into(),
7123 author_name: proto.author_name.clone().into(),
7124 }
7125}
7126
/// Recomputes the full repository snapshot for `this`, publishing it in two
/// stages.
///
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        // A full recompute supersedes any queued incremental refreshes.
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve the HEAD sha first, then load the commit details; a repository
    // with no commits yet resolves to Ok(None).
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Keep only linked worktrees; the main working directory is excluded.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First foreground update: publish branch/HEAD/remote/worktree state so
    // the UI can react before the slower file statuses are computed.
    let snapshot = this.update(cx, |this, cx| {
        let branch_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if branch_changed {
            cx.emit(RepositoryEvent::BranchChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // File state: statuses for the whole tree, diff stats (only when a HEAD
    // exists to diff against), and the stash list.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Join statuses with their diff stats and collect conflicted paths for
    // the merge-state update below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Second foreground update: publish file state and emit change events.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7289
/// Decodes a file status from protobuf.
///
/// When the structured `status` variant is present it takes precedence;
/// otherwise the flat `simple_status` code is decoded as a fallback.
/// Returns an error for codes that are invalid in the given context.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Fallback path: no structured variant, interpret the simple code.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        // Unmerged: decode both heads' codes, failing if either is invalid.
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        // Tracked: decode the index and worktree codes independently.
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7375
7376fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7377 use proto::git_file_status::{Tracked, Unmerged, Variant};
7378
7379 let variant = match status {
7380 FileStatus::Untracked => Variant::Untracked(Default::default()),
7381 FileStatus::Ignored => Variant::Ignored(Default::default()),
7382 FileStatus::Unmerged(UnmergedStatus {
7383 first_head,
7384 second_head,
7385 }) => Variant::Unmerged(Unmerged {
7386 first_head: unmerged_status_to_proto(first_head),
7387 second_head: unmerged_status_to_proto(second_head),
7388 }),
7389 FileStatus::Tracked(TrackedStatus {
7390 index_status,
7391 worktree_status,
7392 }) => Variant::Tracked(Tracked {
7393 index_status: tracked_status_to_proto(index_status),
7394 worktree_status: tracked_status_to_proto(worktree_status),
7395 }),
7396 };
7397 proto::GitFileStatus {
7398 variant: Some(variant),
7399 }
7400}
7401
7402fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7403 match code {
7404 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7405 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7406 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7407 }
7408}
7409
7410fn tracked_status_to_proto(code: StatusCode) -> i32 {
7411 match code {
7412 StatusCode::Added => proto::GitStatus::Added as _,
7413 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7414 StatusCode::Modified => proto::GitStatus::Modified as _,
7415 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7416 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7417 StatusCode::Copied => proto::GitStatus::Copied as _,
7418 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7419 }
7420}