1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for all git state in a project: every discovered repository,
/// per-buffer diff/conflict state, and the diffs shared with remote peers.
pub struct GitStore {
    /// Local vs. remote backing state; see `GitStoreState`.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All known repositories, keyed by their store-assigned id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// Which worktrees each repository is associated with.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository currently considered "active", if any.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff-loading tasks, deduplicated per (buffer, diff kind).
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets, base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diffs that have been shared with downstream collaborators, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// The diffs for a single buffer that have been shared with a remote peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: the buffer's diffs, conflict-marker tracking, and
/// the base texts those diffs are computed against.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs against specific commits, keyed by OID; the `None` key holds a
    /// diff with no base text (see `open_diff_since`).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders resolved when the next conflict-marker reparse finishes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    /// Watch flag: true while a diff recalculation is in flight.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    // Cached base texts for the diffs above.
    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed and their new
/// contents; `None` means the corresponding base text no longer exists.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index text changed.
    SetIndex(Option<String>),
    /// Only the HEAD text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}
155
/// Which base a buffer diff is computed against.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer contents vs. the index text.
    Unstaged,
    /// Buffer contents vs. the committed (HEAD) text.
    Uncommitted,
    /// Buffer contents vs. a specific commit (`None` = no base text).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store operates on local repositories or proxies requests to a
/// remote (collaboration host) project.
enum GitStoreState {
    Local {
        /// Source of ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this (already remote) project is re-shared.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A repository change queued for forwarding to downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Bookkeeping for a shared local project: the client updates are sent to,
/// and the background task draining `updates_tx` (see `GitStore::shared`).
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// A point-in-time capture of every repository's state, keyed by working
/// directory, produced by `GitStore::checkpoint`.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// The git status of a single path within a repository, plus its diff
/// statistics when known.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
/// Allows status entries to live in a `SumTree` with `GitSummary` aggregated
/// over subtrees and paths as the ordering dimension.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed by their repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Store-assigned identifier for a repository.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: the merge heads associated with each
/// conflicted path, and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Commit data that may still be loading in the background.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// An immutable view of a repository's state. Cloned freely and diffed
/// against earlier snapshots when building downstream updates.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Per-path git statuses, ordered by repository-relative path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    pub scan_id: u64,
    /// Merge-in-progress details, if any.
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// The repository's linked git worktrees.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
298
/// Identifier for a job running against a repository.
type JobId = u64;

/// Metadata about a running git job (when it started and what it is doing).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Background handler that resolves full commit data for requested OIDs,
/// fed via `commit_data_request`.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle state of the commit-data handler task.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Commit-graph data loaded for one (source, order) combination, plus the
/// task that continues fetching it.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index into `commit_data` for each commit OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of commit-graph data handed out to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// A single git repository known to the `GitStore`. Derefs to its
/// `RepositorySnapshot` for read access to current state.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    /// Buffer backing the commit message, when one has been opened.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue feeding serialized jobs to this repository's job runner.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // NOTE(review): appears to be a counter for assigning JobIds — confirm
    // against the job-spawning code.
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Lazily resolved backing state (local backend or remote client),
    /// shared across jobs.
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
350
/// Lets callers read snapshot fields (statuses, branch, etc.) directly off a
/// `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
358
/// Backing state for a repository that lives on the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    /// Low-level git implementation used to run operations.
    pub backend: Arc<dyn GitRepository>,
    /// Environment variables resolved for the repository's work directory.
    pub environment: Arc<HashMap<String, String>>,
}
365
366impl LocalRepositoryState {
367 async fn new(
368 work_directory_abs_path: Arc<Path>,
369 dot_git_abs_path: Arc<Path>,
370 project_environment: WeakEntity<ProjectEnvironment>,
371 fs: Arc<dyn Fs>,
372 is_trusted: bool,
373 cx: &mut AsyncApp,
374 ) -> anyhow::Result<Self> {
375 let environment = project_environment
376 .update(cx, |project_environment, cx| {
377 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
378 })?
379 .await
380 .unwrap_or_else(|| {
381 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
382 HashMap::default()
383 });
384 let search_paths = environment.get("PATH").map(|val| val.to_owned());
385 let backend = cx
386 .background_spawn({
387 let fs = fs.clone();
388 async move {
389 let system_git_binary_path = search_paths
390 .and_then(|search_paths| {
391 which::which_in("git", Some(search_paths), &work_directory_abs_path)
392 .ok()
393 })
394 .or_else(|| which::which("git").ok());
395 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
396 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
397 }
398 })
399 .await?;
400 backend.set_trusted(is_trusted);
401 Ok(LocalRepositoryState {
402 backend,
403 environment: Arc::new(environment),
404 fs,
405 })
406 }
407}
408
/// State needed to proxy git operations to the upstream collaboration host.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// Where a repository's operations execute: directly on disk, or via RPC to
/// the host project.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events for commit-graph loading.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
427
/// Events emitted by an individual `Repository`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the `GitStore` itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
452
// Declare which event types each entity can emit through gpui.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
456
/// A unit of work queued on a repository's serialized job runner.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    /// When present, identifies a class of job so equivalent queued jobs can
    /// be coalesced rather than piling up.
    key: Option<GitJobKey>,
}

/// Job classes that should not accumulate duplicates in the queue.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
471 pub fn local(
472 worktree_store: &Entity<WorktreeStore>,
473 buffer_store: Entity<BufferStore>,
474 environment: Entity<ProjectEnvironment>,
475 fs: Arc<dyn Fs>,
476 cx: &mut Context<Self>,
477 ) -> Self {
478 Self::new(
479 worktree_store.clone(),
480 buffer_store,
481 GitStoreState::Local {
482 next_repository_id: Arc::new(AtomicU64::new(1)),
483 downstream: None,
484 project_environment: environment,
485 fs,
486 },
487 cx,
488 )
489 }
490
491 pub fn remote(
492 worktree_store: &Entity<WorktreeStore>,
493 buffer_store: Entity<BufferStore>,
494 upstream_client: AnyProtoClient,
495 project_id: u64,
496 cx: &mut Context<Self>,
497 ) -> Self {
498 Self::new(
499 worktree_store.clone(),
500 buffer_store,
501 GitStoreState::Remote {
502 upstream_client,
503 upstream_project_id: project_id,
504 downstream: None,
505 },
506 cx,
507 )
508 }
509
510 fn new(
511 worktree_store: Entity<WorktreeStore>,
512 buffer_store: Entity<BufferStore>,
513 state: GitStoreState,
514 cx: &mut Context<Self>,
515 ) -> Self {
516 let mut _subscriptions = vec![
517 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
518 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
519 ];
520
521 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
522 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
523 }
524
525 GitStore {
526 state,
527 buffer_store,
528 worktree_store,
529 repositories: HashMap::default(),
530 worktree_ids: HashMap::default(),
531 active_repo_id: None,
532 _subscriptions,
533 loading_diffs: HashMap::default(),
534 shared_diffs: HashMap::default(),
535 diffs: HashMap::default(),
536 }
537 }
538
    /// Registers all git-related RPC handlers on `client`, allowing remote
    /// peers to drive this store's operations.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
586
    /// True if this store operates directly on local repositories.
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
590 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
591 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
592 let id = repo.read(cx).id;
593 if self.active_repo_id != Some(id) {
594 self.active_repo_id = Some(id);
595 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
596 }
597 }
598 }
599
    /// Begins sharing this project's git state with collaborators.
    ///
    /// For a remote store, each repository's initial snapshot is forwarded
    /// immediately. For a local store, a background task is spawned that
    /// converts queued snapshot updates into incremental protobuf messages.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send a full initial update for every repository, split into
                // size-limited chunks.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the channel with every repository's current snapshot.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send only the
                                            // delta since the last forwarded
                                            // snapshot.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send a full
                                            // initial update, then remember it.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The channel closed or a send failed: tear down the
                        // downstream state so forwarding stops.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
680
681 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
682 match &mut self.state {
683 GitStoreState::Local {
684 downstream: downstream_client,
685 ..
686 } => {
687 downstream_client.take();
688 }
689 GitStoreState::Remote {
690 downstream: downstream_client,
691 ..
692 } => {
693 downstream_client.take();
694 }
695 }
696 self.shared_diffs.clear();
697 }
698
    /// Drops diff-sharing state for a peer that left the project.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
702
703 pub fn active_repository(&self) -> Option<Entity<Repository>> {
704 self.active_repo_id
705 .as_ref()
706 .map(|id| self.repositories[id].clone())
707 }
708
    /// Returns (opening if necessary) the unstaged diff — buffer contents vs.
    /// the index text — for `buffer`. Concurrent calls for the same buffer
    /// share a single loading task.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: the diff is already open; wait out any in-flight
        // recalculation before handing it back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Slow path: load the index text and build the diff, deduplicating
        // concurrent requests via `loading_diffs`.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
763
    /// Returns (opening if necessary) a diff of `buffer` against the blob it
    /// had at commit `oid`; a `None` oid yields a diff with no base text.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff is already open; wait out any in-flight
        // recalculation first.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Deduplicate concurrent loads of the same diff.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository, unless no oid
                    // was provided.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Attach the unstaged diff as the secondary diff, mirroring
                    // what `open_diff_internal` does for uncommitted diffs.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Register the new diff (and its cached base text) in the
                    // per-buffer git state, and clear the dedup entry.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
864
    /// Returns (opening if necessary) the uncommitted diff — buffer contents
    /// vs. the committed text — for `buffer`. Concurrent calls for the same
    /// buffer share a single loading task.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff is already open; wait out any in-flight
        // recalculation before handing it back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Slow path: load the committed text and build the diff, deduplicating
        // concurrent requests via `loading_diffs`.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
917
    /// Shared continuation for `open_unstaged_diff` / `open_uncommitted_diff`:
    /// records the freshly loaded base texts in the buffer's git state,
    /// creates the `BufferDiff` entity, and resolves once the initial diff
    /// recalculation (if any) completes.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // On load failure, clear the dedup entry so a later retry starts fresh.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                // Resolve only after the initial recalculation finishes.
                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
992
993 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
994 let diff_state = self.diffs.get(&buffer_id)?;
995 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
996 }
997
998 pub fn get_uncommitted_diff(
999 &self,
1000 buffer_id: BufferId,
1001 cx: &App,
1002 ) -> Option<Entity<BufferDiff>> {
1003 let diff_state = self.diffs.get(&buffer_id)?;
1004 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1005 }
1006
1007 pub fn get_diff_since_oid(
1008 &self,
1009 buffer_id: BufferId,
1010 oid: Option<git::Oid>,
1011 cx: &App,
1012 ) -> Option<Entity<BufferDiff>> {
1013 let diff_state = self.diffs.get(&buffer_id)?;
1014 diff_state.read(cx).oid_diff(oid)
1015 }
1016
1017 pub fn open_conflict_set(
1018 &mut self,
1019 buffer: Entity<Buffer>,
1020 cx: &mut Context<Self>,
1021 ) -> Entity<ConflictSet> {
1022 log::debug!("open conflict set");
1023 let buffer_id = buffer.read(cx).remote_id();
1024
1025 if let Some(git_state) = self.diffs.get(&buffer_id)
1026 && let Some(conflict_set) = git_state
1027 .read(cx)
1028 .conflict_set
1029 .as_ref()
1030 .and_then(|weak| weak.upgrade())
1031 {
1032 let conflict_set = conflict_set;
1033 let buffer_snapshot = buffer.read(cx).text_snapshot();
1034
1035 git_state.update(cx, |state, cx| {
1036 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1037 });
1038
1039 return conflict_set;
1040 }
1041
1042 let is_unmerged = self
1043 .repository_and_path_for_buffer_id(buffer_id, cx)
1044 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1045 let git_store = cx.weak_entity();
1046 let buffer_git_state = self
1047 .diffs
1048 .entry(buffer_id)
1049 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1050 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1051
1052 self._subscriptions
1053 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1054 cx.emit(GitStoreEvent::ConflictsUpdated);
1055 }));
1056
1057 buffer_git_state.update(cx, |state, cx| {
1058 state.conflict_set = Some(conflict_set.downgrade());
1059 let buffer_snapshot = buffer.read(cx).text_snapshot();
1060 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1061 });
1062
1063 conflict_set
1064 }
1065
1066 pub fn project_path_git_status(
1067 &self,
1068 project_path: &ProjectPath,
1069 cx: &App,
1070 ) -> Option<FileStatus> {
1071 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1072 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1073 }
1074
1075 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
1076 let mut work_directory_abs_paths = Vec::new();
1077 let mut checkpoints = Vec::new();
1078 for repository in self.repositories.values() {
1079 repository.update(cx, |repository, _| {
1080 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
1081 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
1082 });
1083 }
1084
1085 cx.background_executor().spawn(async move {
1086 let checkpoints = future::try_join_all(checkpoints).await?;
1087 Ok(GitStoreCheckpoint {
1088 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
1089 .into_iter()
1090 .zip(checkpoints)
1091 .collect(),
1092 })
1093 })
1094 }
1095
1096 pub fn restore_checkpoint(
1097 &self,
1098 checkpoint: GitStoreCheckpoint,
1099 cx: &mut App,
1100 ) -> Task<Result<()>> {
1101 let repositories_by_work_dir_abs_path = self
1102 .repositories
1103 .values()
1104 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1105 .collect::<HashMap<_, _>>();
1106
1107 let mut tasks = Vec::new();
1108 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1109 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1110 let restore = repository.update(cx, |repository, _| {
1111 repository.restore_checkpoint(checkpoint)
1112 });
1113 tasks.push(async move { restore.await? });
1114 }
1115 }
1116 cx.background_spawn(async move {
1117 future::try_join_all(tasks).await?;
1118 Ok(())
1119 })
1120 }
1121
1122 /// Compares two checkpoints, returning true if they are equal.
1123 pub fn compare_checkpoints(
1124 &self,
1125 left: GitStoreCheckpoint,
1126 mut right: GitStoreCheckpoint,
1127 cx: &mut App,
1128 ) -> Task<Result<bool>> {
1129 let repositories_by_work_dir_abs_path = self
1130 .repositories
1131 .values()
1132 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1133 .collect::<HashMap<_, _>>();
1134
1135 let mut tasks = Vec::new();
1136 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1137 if let Some(right_checkpoint) = right
1138 .checkpoints_by_work_dir_abs_path
1139 .remove(&work_dir_abs_path)
1140 {
1141 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1142 {
1143 let compare = repository.update(cx, |repository, _| {
1144 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1145 });
1146
1147 tasks.push(async move { compare.await? });
1148 }
1149 } else {
1150 return Task::ready(Ok(false));
1151 }
1152 }
1153 cx.background_spawn(async move {
1154 Ok(future::try_join_all(tasks)
1155 .await?
1156 .into_iter()
1157 .all(|result| result))
1158 })
1159 }
1160
1161 /// Blames a buffer.
1162 pub fn blame_buffer(
1163 &self,
1164 buffer: &Entity<Buffer>,
1165 version: Option<clock::Global>,
1166 cx: &mut Context<Self>,
1167 ) -> Task<Result<Option<Blame>>> {
1168 let buffer = buffer.read(cx);
1169 let Some((repo, repo_path)) =
1170 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1171 else {
1172 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1173 };
1174 let content = match &version {
1175 Some(version) => buffer.rope_for_version(version),
1176 None => buffer.as_rope().clone(),
1177 };
1178 let line_ending = buffer.line_ending();
1179 let version = version.unwrap_or(buffer.version());
1180 let buffer_id = buffer.remote_id();
1181
1182 let repo = repo.downgrade();
1183 cx.spawn(async move |_, cx| {
1184 let repository_state = repo
1185 .update(cx, |repo, _| repo.repository_state.clone())?
1186 .await
1187 .map_err(|err| anyhow::anyhow!(err))?;
1188 match repository_state {
1189 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1190 .blame(repo_path.clone(), content, line_ending)
1191 .await
1192 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1193 .map(Some),
1194 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1195 let response = client
1196 .request(proto::BlameBuffer {
1197 project_id: project_id.to_proto(),
1198 buffer_id: buffer_id.into(),
1199 version: serialize_version(&version),
1200 })
1201 .await?;
1202 Ok(deserialize_blame_buffer_response(response))
1203 }
1204 }
1205 })
1206 }
1207
1208 pub fn file_history(
1209 &self,
1210 repo: &Entity<Repository>,
1211 path: RepoPath,
1212 cx: &mut App,
1213 ) -> Task<Result<git::repository::FileHistory>> {
1214 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1215
1216 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1217 }
1218
1219 pub fn file_history_paginated(
1220 &self,
1221 repo: &Entity<Repository>,
1222 path: RepoPath,
1223 skip: usize,
1224 limit: Option<usize>,
1225 cx: &mut App,
1226 ) -> Task<Result<git::repository::FileHistory>> {
1227 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1228
1229 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1230 }
1231
    /// Builds a permalink URL to the given line range of the buffer.
    ///
    /// For buffers inside a git repository, the link is constructed from the
    /// repository's remote URL and HEAD SHA via the hosting-provider registry.
    /// For Rust files outside any repository, a Cargo-registry-based fallback
    /// is attempted. Errors if neither strategy applies.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Local: resolve the remote URL and HEAD SHA here, then
                    // let the matching hosting provider build the permalink.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Remote: ask the host to build the permalink for us.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1316
1317 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1318 match &self.state {
1319 GitStoreState::Local {
1320 downstream: downstream_client,
1321 ..
1322 } => downstream_client
1323 .as_ref()
1324 .map(|state| (state.client.clone(), state.project_id)),
1325 GitStoreState::Remote {
1326 downstream: downstream_client,
1327 ..
1328 } => downstream_client.clone(),
1329 }
1330 }
1331
1332 fn upstream_client(&self) -> Option<AnyProtoClient> {
1333 match &self.state {
1334 GitStoreState::Local { .. } => None,
1335 GitStoreState::Remote {
1336 upstream_client, ..
1337 } => Some(upstream_client.clone()),
1338 }
1339 }
1340
    /// Routes worktree-store events into git state: schedules path rescans,
    /// adds/updates repositories discovered in worktrees, and drops
    /// repositories whose last containing worktree was removed.
    /// Only local stores react; remote stores return immediately.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the updated entries by owning repository, then
                    // notify each repository of its changed paths, forwarding
                    // updates downstream when this project is shared.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees don't contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Detach this worktree from every repository's worktree set;
                // repositories left with no worktrees are removed below.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream collaborators the repository is gone.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Handles a repository change event: refreshes conflict tracking for
    /// every open buffer belonging to this repository, then re-emits the
    /// event at the store level (flagging whether it's the active repo).
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only buffers that resolve to this same repository are affected.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Sync the conflict flag from the repository status;
                        // if it changed, re-scan the buffer text for markers.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1486
1487 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1488 cx.emit(GitStoreEvent::JobsUpdated)
1489 }
1490
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
    ///
    /// Updates that match an existing repository (by old or new work
    /// directory) either move/rescan it or detach it from this worktree;
    /// previously-unseen work directories get a fresh `Repository` entity.
    /// Repositories that lose their last worktree are removed and the removal
    /// is forwarded downstream.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update against an existing repository by comparing
            // work directories (either the pre- or post-update path).
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repository still exists (possibly moved): record the
                    // worktree association, apply the new path, and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // The repository disappeared from this worktree; if no
                    // other worktree still contains it, remove it below.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A brand-new repository: allocate an id, determine whether
                // its worktree is trusted, and spawn a local repository
                // entity along with its initial scan.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository seen becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            // Notify downstream collaborators of the removal.
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1603
1604 fn on_trusted_worktrees_event(
1605 &mut self,
1606 _: Entity<TrustedWorktreesStore>,
1607 event: &TrustedWorktreesEvent,
1608 cx: &mut Context<Self>,
1609 ) {
1610 if !matches!(self.state, GitStoreState::Local { .. }) {
1611 return;
1612 }
1613
1614 let (is_trusted, event_paths) = match event {
1615 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1616 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1617 };
1618
1619 for (repo_id, worktree_ids) in &self.worktree_ids {
1620 if worktree_ids
1621 .iter()
1622 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1623 {
1624 if let Some(repo) = self.repositories.get(repo_id) {
1625 let repository_state = repo.read(cx).repository_state.clone();
1626 cx.background_spawn(async move {
1627 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1628 state.backend.set_trusted(is_trusted);
1629 }
1630 })
1631 .detach();
1632 }
1633 }
1634 }
1635 }
1636
    /// Keeps git state in sync with buffer lifecycle events: notifies diff
    /// state of language changes, drops diff state for closed buffers, and
    /// reloads diff bases when a buffer's file path changes.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch each new buffer for language changes so its diff
                // state can be updated accordingly.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // A collaborator closed the buffer: drop only their share.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop all local and shared diff state for the buffer.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Best-effort: failures are logged, not surfaced.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1709
    /// Re-runs diff recalculation and conflict-marker parsing for the given
    /// buffers, returning a future that resolves once all of them finish.
    /// Buffers without existing diff state are skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    // Start the recalculation, then (if one is now pending)
                    // collect a future that resolves when it completes.
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                // Conflict markers are reparsed independently of the diff.
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1735
    /// Responds to hunks being staged or unstaged in a buffer diff by writing
    /// the new index text to the repository. On failure, pending hunks are
    /// cleared and an `IndexWriteError` event is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the per-buffer operation counter and pass it along to
                // the index-write job (presumably so stale writes can be
                // detected — confirm against `spawn_set_index_text_job`).
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Outer Ok = the job ran; inner Err = the index write
                        // itself failed, so roll back the optimistic UI state.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1775
1776 fn local_worktree_git_repos_changed(
1777 &mut self,
1778 worktree: Entity<Worktree>,
1779 changed_repos: &UpdatedGitRepositoriesSet,
1780 cx: &mut Context<Self>,
1781 ) {
1782 log::debug!("local worktree repos changed");
1783 debug_assert!(worktree.read(cx).is_local());
1784
1785 for repository in self.repositories.values() {
1786 repository.update(cx, |repository, cx| {
1787 let repo_abs_path = &repository.work_directory_abs_path;
1788 if changed_repos.iter().any(|update| {
1789 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1790 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1791 }) {
1792 repository.reload_buffer_diff_bases(cx);
1793 }
1794 });
1795 }
1796 }
1797
    /// Returns all repositories tracked by this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1801
1802 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1803 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1804 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1805 Some(status.status)
1806 }
1807
1808 pub fn repository_and_path_for_buffer_id(
1809 &self,
1810 buffer_id: BufferId,
1811 cx: &App,
1812 ) -> Option<(Entity<Repository>, RepoPath)> {
1813 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1814 let project_path = buffer.read(cx).project_path(cx)?;
1815 self.repository_and_path_for_project_path(&project_path, cx)
1816 }
1817
1818 pub fn repository_and_path_for_project_path(
1819 &self,
1820 path: &ProjectPath,
1821 cx: &App,
1822 ) -> Option<(Entity<Repository>, RepoPath)> {
1823 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1824 self.repositories
1825 .values()
1826 .filter_map(|repo| {
1827 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1828 Some((repo.clone(), repo_path))
1829 })
1830 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1831 }
1832
1833 pub fn git_init(
1834 &self,
1835 path: Arc<Path>,
1836 fallback_branch_name: String,
1837 cx: &App,
1838 ) -> Task<Result<()>> {
1839 match &self.state {
1840 GitStoreState::Local { fs, .. } => {
1841 let fs = fs.clone();
1842 cx.background_executor()
1843 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1844 }
1845 GitStoreState::Remote {
1846 upstream_client,
1847 upstream_project_id: project_id,
1848 ..
1849 } => {
1850 let client = upstream_client.clone();
1851 let project_id = *project_id;
1852 cx.background_executor().spawn(async move {
1853 client
1854 .request(proto::GitInit {
1855 project_id: project_id,
1856 abs_path: path.to_string_lossy().into_owned(),
1857 fallback_branch_name,
1858 })
1859 .await?;
1860 Ok(())
1861 })
1862 }
1863 }
1864 }
1865
1866 pub fn git_clone(
1867 &self,
1868 repo: String,
1869 path: impl Into<Arc<std::path::Path>>,
1870 cx: &App,
1871 ) -> Task<Result<()>> {
1872 let path = path.into();
1873 match &self.state {
1874 GitStoreState::Local { fs, .. } => {
1875 let fs = fs.clone();
1876 cx.background_executor()
1877 .spawn(async move { fs.git_clone(&repo, &path).await })
1878 }
1879 GitStoreState::Remote {
1880 upstream_client,
1881 upstream_project_id,
1882 ..
1883 } => {
1884 if upstream_client.is_via_collab() {
1885 return Task::ready(Err(anyhow!(
1886 "Git Clone isn't supported for project guests"
1887 )));
1888 }
1889 let request = upstream_client.request(proto::GitClone {
1890 project_id: *upstream_project_id,
1891 abs_path: path.to_string_lossy().into_owned(),
1892 remote_repo: repo,
1893 });
1894
1895 cx.background_spawn(async move {
1896 let result = request.await?;
1897
1898 match result.success {
1899 true => Ok(()),
1900 false => Err(anyhow!("Git Clone failed")),
1901 }
1902 })
1903 }
1904 }
1905 }
1906
    /// Applies an `UpdateRepository` message from the upstream host: creates
    /// the remote repository entity on first sight, applies the snapshot
    /// update, and re-forwards the message to any downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Create the repository entity if this is the first update seen
            // for this id. The subscription is stashed outside the entry
            // closure, which already borrows `this`.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository we hear about becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-share the update with our own downstream, if any.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1962
1963 async fn handle_remove_repository(
1964 this: Entity<Self>,
1965 envelope: TypedEnvelope<proto::RemoveRepository>,
1966 mut cx: AsyncApp,
1967 ) -> Result<()> {
1968 this.update(&mut cx, |this, cx| {
1969 let mut update = envelope.payload;
1970 let id = RepositoryId::from_proto(update.id);
1971 this.repositories.remove(&id);
1972 if let Some((client, project_id)) = this.downstream_client() {
1973 update.project_id = project_id.to_proto();
1974 client.send(update).log_err();
1975 }
1976 if this.active_repo_id == Some(id) {
1977 this.active_repo_id = None;
1978 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1979 }
1980 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1981 });
1982 Ok(())
1983 }
1984
1985 async fn handle_git_init(
1986 this: Entity<Self>,
1987 envelope: TypedEnvelope<proto::GitInit>,
1988 cx: AsyncApp,
1989 ) -> Result<proto::Ack> {
1990 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1991 let name = envelope.payload.fallback_branch_name;
1992 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1993 .await?;
1994
1995 Ok(proto::Ack {})
1996 }
1997
1998 async fn handle_git_clone(
1999 this: Entity<Self>,
2000 envelope: TypedEnvelope<proto::GitClone>,
2001 cx: AsyncApp,
2002 ) -> Result<proto::GitCloneResponse> {
2003 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2004 let repo_name = envelope.payload.remote_repo;
2005 let result = cx
2006 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2007 .await;
2008
2009 Ok(proto::GitCloneResponse {
2010 success: result.is_ok(),
2011 })
2012 }
2013
2014 async fn handle_fetch(
2015 this: Entity<Self>,
2016 envelope: TypedEnvelope<proto::Fetch>,
2017 mut cx: AsyncApp,
2018 ) -> Result<proto::RemoteMessageResponse> {
2019 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2020 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2021 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2022 let askpass_id = envelope.payload.askpass_id;
2023
2024 let askpass = make_remote_delegate(
2025 this,
2026 envelope.payload.project_id,
2027 repository_id,
2028 askpass_id,
2029 &mut cx,
2030 );
2031
2032 let remote_output = repository_handle
2033 .update(&mut cx, |repository_handle, cx| {
2034 repository_handle.fetch(fetch_options, askpass, cx)
2035 })
2036 .await??;
2037
2038 Ok(proto::RemoteMessageResponse {
2039 stdout: remote_output.stdout,
2040 stderr: remote_output.stderr,
2041 })
2042 }
2043
2044 async fn handle_push(
2045 this: Entity<Self>,
2046 envelope: TypedEnvelope<proto::Push>,
2047 mut cx: AsyncApp,
2048 ) -> Result<proto::RemoteMessageResponse> {
2049 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2050 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2051
2052 let askpass_id = envelope.payload.askpass_id;
2053 let askpass = make_remote_delegate(
2054 this,
2055 envelope.payload.project_id,
2056 repository_id,
2057 askpass_id,
2058 &mut cx,
2059 );
2060
2061 let options = envelope
2062 .payload
2063 .options
2064 .as_ref()
2065 .map(|_| match envelope.payload.options() {
2066 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
2067 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
2068 });
2069
2070 let branch_name = envelope.payload.branch_name.into();
2071 let remote_branch_name = envelope.payload.remote_branch_name.into();
2072 let remote_name = envelope.payload.remote_name.into();
2073
2074 let remote_output = repository_handle
2075 .update(&mut cx, |repository_handle, cx| {
2076 repository_handle.push(
2077 branch_name,
2078 remote_branch_name,
2079 remote_name,
2080 options,
2081 askpass,
2082 cx,
2083 )
2084 })
2085 .await??;
2086 Ok(proto::RemoteMessageResponse {
2087 stdout: remote_output.stdout,
2088 stderr: remote_output.stderr,
2089 })
2090 }
2091
2092 async fn handle_pull(
2093 this: Entity<Self>,
2094 envelope: TypedEnvelope<proto::Pull>,
2095 mut cx: AsyncApp,
2096 ) -> Result<proto::RemoteMessageResponse> {
2097 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2098 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2099 let askpass_id = envelope.payload.askpass_id;
2100 let askpass = make_remote_delegate(
2101 this,
2102 envelope.payload.project_id,
2103 repository_id,
2104 askpass_id,
2105 &mut cx,
2106 );
2107
2108 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2109 let remote_name = envelope.payload.remote_name.into();
2110 let rebase = envelope.payload.rebase;
2111
2112 let remote_message = repository_handle
2113 .update(&mut cx, |repository_handle, cx| {
2114 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2115 })
2116 .await??;
2117
2118 Ok(proto::RemoteMessageResponse {
2119 stdout: remote_message.stdout,
2120 stderr: remote_message.stderr,
2121 })
2122 }
2123
2124 async fn handle_stage(
2125 this: Entity<Self>,
2126 envelope: TypedEnvelope<proto::Stage>,
2127 mut cx: AsyncApp,
2128 ) -> Result<proto::Ack> {
2129 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2130 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2131
2132 let entries = envelope
2133 .payload
2134 .paths
2135 .into_iter()
2136 .map(|path| RepoPath::new(&path))
2137 .collect::<Result<Vec<_>>>()?;
2138
2139 repository_handle
2140 .update(&mut cx, |repository_handle, cx| {
2141 repository_handle.stage_entries(entries, cx)
2142 })
2143 .await?;
2144 Ok(proto::Ack {})
2145 }
2146
2147 async fn handle_unstage(
2148 this: Entity<Self>,
2149 envelope: TypedEnvelope<proto::Unstage>,
2150 mut cx: AsyncApp,
2151 ) -> Result<proto::Ack> {
2152 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2153 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2154
2155 let entries = envelope
2156 .payload
2157 .paths
2158 .into_iter()
2159 .map(|path| RepoPath::new(&path))
2160 .collect::<Result<Vec<_>>>()?;
2161
2162 repository_handle
2163 .update(&mut cx, |repository_handle, cx| {
2164 repository_handle.unstage_entries(entries, cx)
2165 })
2166 .await?;
2167
2168 Ok(proto::Ack {})
2169 }
2170
2171 async fn handle_stash(
2172 this: Entity<Self>,
2173 envelope: TypedEnvelope<proto::Stash>,
2174 mut cx: AsyncApp,
2175 ) -> Result<proto::Ack> {
2176 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2177 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2178
2179 let entries = envelope
2180 .payload
2181 .paths
2182 .into_iter()
2183 .map(|path| RepoPath::new(&path))
2184 .collect::<Result<Vec<_>>>()?;
2185
2186 repository_handle
2187 .update(&mut cx, |repository_handle, cx| {
2188 repository_handle.stash_entries(entries, cx)
2189 })
2190 .await?;
2191
2192 Ok(proto::Ack {})
2193 }
2194
2195 async fn handle_stash_pop(
2196 this: Entity<Self>,
2197 envelope: TypedEnvelope<proto::StashPop>,
2198 mut cx: AsyncApp,
2199 ) -> Result<proto::Ack> {
2200 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2201 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2202 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2203
2204 repository_handle
2205 .update(&mut cx, |repository_handle, cx| {
2206 repository_handle.stash_pop(stash_index, cx)
2207 })
2208 .await?;
2209
2210 Ok(proto::Ack {})
2211 }
2212
2213 async fn handle_stash_apply(
2214 this: Entity<Self>,
2215 envelope: TypedEnvelope<proto::StashApply>,
2216 mut cx: AsyncApp,
2217 ) -> Result<proto::Ack> {
2218 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2219 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2220 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2221
2222 repository_handle
2223 .update(&mut cx, |repository_handle, cx| {
2224 repository_handle.stash_apply(stash_index, cx)
2225 })
2226 .await?;
2227
2228 Ok(proto::Ack {})
2229 }
2230
2231 async fn handle_stash_drop(
2232 this: Entity<Self>,
2233 envelope: TypedEnvelope<proto::StashDrop>,
2234 mut cx: AsyncApp,
2235 ) -> Result<proto::Ack> {
2236 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2237 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2238 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2239
2240 repository_handle
2241 .update(&mut cx, |repository_handle, cx| {
2242 repository_handle.stash_drop(stash_index, cx)
2243 })
2244 .await??;
2245
2246 Ok(proto::Ack {})
2247 }
2248
2249 async fn handle_set_index_text(
2250 this: Entity<Self>,
2251 envelope: TypedEnvelope<proto::SetIndexText>,
2252 mut cx: AsyncApp,
2253 ) -> Result<proto::Ack> {
2254 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2255 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2256 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2257
2258 repository_handle
2259 .update(&mut cx, |repository_handle, cx| {
2260 repository_handle.spawn_set_index_text_job(
2261 repo_path,
2262 envelope.payload.text,
2263 None,
2264 cx,
2265 )
2266 })
2267 .await??;
2268 Ok(proto::Ack {})
2269 }
2270
2271 async fn handle_run_hook(
2272 this: Entity<Self>,
2273 envelope: TypedEnvelope<proto::RunGitHook>,
2274 mut cx: AsyncApp,
2275 ) -> Result<proto::Ack> {
2276 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2277 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2278 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2279 repository_handle
2280 .update(&mut cx, |repository_handle, cx| {
2281 repository_handle.run_hook(hook, cx)
2282 })
2283 .await??;
2284 Ok(proto::Ack {})
2285 }
2286
2287 async fn handle_commit(
2288 this: Entity<Self>,
2289 envelope: TypedEnvelope<proto::Commit>,
2290 mut cx: AsyncApp,
2291 ) -> Result<proto::Ack> {
2292 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2293 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2294 let askpass_id = envelope.payload.askpass_id;
2295
2296 let askpass = make_remote_delegate(
2297 this,
2298 envelope.payload.project_id,
2299 repository_id,
2300 askpass_id,
2301 &mut cx,
2302 );
2303
2304 let message = SharedString::from(envelope.payload.message);
2305 let name = envelope.payload.name.map(SharedString::from);
2306 let email = envelope.payload.email.map(SharedString::from);
2307 let options = envelope.payload.options.unwrap_or_default();
2308
2309 repository_handle
2310 .update(&mut cx, |repository_handle, cx| {
2311 repository_handle.commit(
2312 message,
2313 name.zip(email),
2314 CommitOptions {
2315 amend: options.amend,
2316 signoff: options.signoff,
2317 },
2318 askpass,
2319 cx,
2320 )
2321 })
2322 .await??;
2323 Ok(proto::Ack {})
2324 }
2325
2326 async fn handle_get_remotes(
2327 this: Entity<Self>,
2328 envelope: TypedEnvelope<proto::GetRemotes>,
2329 mut cx: AsyncApp,
2330 ) -> Result<proto::GetRemotesResponse> {
2331 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2332 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2333
2334 let branch_name = envelope.payload.branch_name;
2335 let is_push = envelope.payload.is_push;
2336
2337 let remotes = repository_handle
2338 .update(&mut cx, |repository_handle, _| {
2339 repository_handle.get_remotes(branch_name, is_push)
2340 })
2341 .await??;
2342
2343 Ok(proto::GetRemotesResponse {
2344 remotes: remotes
2345 .into_iter()
2346 .map(|remotes| proto::get_remotes_response::Remote {
2347 name: remotes.name.to_string(),
2348 })
2349 .collect::<Vec<_>>(),
2350 })
2351 }
2352
2353 async fn handle_get_worktrees(
2354 this: Entity<Self>,
2355 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2356 mut cx: AsyncApp,
2357 ) -> Result<proto::GitWorktreesResponse> {
2358 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2359 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2360
2361 let worktrees = repository_handle
2362 .update(&mut cx, |repository_handle, _| {
2363 repository_handle.worktrees()
2364 })
2365 .await??;
2366
2367 Ok(proto::GitWorktreesResponse {
2368 worktrees: worktrees
2369 .into_iter()
2370 .map(|worktree| worktree_to_proto(&worktree))
2371 .collect::<Vec<_>>(),
2372 })
2373 }
2374
2375 async fn handle_create_worktree(
2376 this: Entity<Self>,
2377 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2378 mut cx: AsyncApp,
2379 ) -> Result<proto::Ack> {
2380 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2381 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2382 let directory = PathBuf::from(envelope.payload.directory);
2383 let name = envelope.payload.name;
2384 let commit = envelope.payload.commit;
2385
2386 repository_handle
2387 .update(&mut cx, |repository_handle, _| {
2388 repository_handle.create_worktree(name, directory, commit)
2389 })
2390 .await??;
2391
2392 Ok(proto::Ack {})
2393 }
2394
2395 async fn handle_remove_worktree(
2396 this: Entity<Self>,
2397 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2398 mut cx: AsyncApp,
2399 ) -> Result<proto::Ack> {
2400 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2401 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2402 let path = PathBuf::from(envelope.payload.path);
2403 let force = envelope.payload.force;
2404
2405 repository_handle
2406 .update(&mut cx, |repository_handle, _| {
2407 repository_handle.remove_worktree(path, force)
2408 })
2409 .await??;
2410
2411 Ok(proto::Ack {})
2412 }
2413
2414 async fn handle_rename_worktree(
2415 this: Entity<Self>,
2416 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2417 mut cx: AsyncApp,
2418 ) -> Result<proto::Ack> {
2419 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2420 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2421 let old_path = PathBuf::from(envelope.payload.old_path);
2422 let new_path = PathBuf::from(envelope.payload.new_path);
2423
2424 repository_handle
2425 .update(&mut cx, |repository_handle, _| {
2426 repository_handle.rename_worktree(old_path, new_path)
2427 })
2428 .await??;
2429
2430 Ok(proto::Ack {})
2431 }
2432
2433 async fn handle_get_branches(
2434 this: Entity<Self>,
2435 envelope: TypedEnvelope<proto::GitGetBranches>,
2436 mut cx: AsyncApp,
2437 ) -> Result<proto::GitBranchesResponse> {
2438 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2439 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2440
2441 let branches = repository_handle
2442 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2443 .await??;
2444
2445 Ok(proto::GitBranchesResponse {
2446 branches: branches
2447 .into_iter()
2448 .map(|branch| branch_to_proto(&branch))
2449 .collect::<Vec<_>>(),
2450 })
2451 }
2452 async fn handle_get_default_branch(
2453 this: Entity<Self>,
2454 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2455 mut cx: AsyncApp,
2456 ) -> Result<proto::GetDefaultBranchResponse> {
2457 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2458 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2459
2460 let branch = repository_handle
2461 .update(&mut cx, |repository_handle, _| {
2462 repository_handle.default_branch(false)
2463 })
2464 .await??
2465 .map(Into::into);
2466
2467 Ok(proto::GetDefaultBranchResponse { branch })
2468 }
2469 async fn handle_create_branch(
2470 this: Entity<Self>,
2471 envelope: TypedEnvelope<proto::GitCreateBranch>,
2472 mut cx: AsyncApp,
2473 ) -> Result<proto::Ack> {
2474 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2475 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2476 let branch_name = envelope.payload.branch_name;
2477
2478 repository_handle
2479 .update(&mut cx, |repository_handle, _| {
2480 repository_handle.create_branch(branch_name, None)
2481 })
2482 .await??;
2483
2484 Ok(proto::Ack {})
2485 }
2486
2487 async fn handle_change_branch(
2488 this: Entity<Self>,
2489 envelope: TypedEnvelope<proto::GitChangeBranch>,
2490 mut cx: AsyncApp,
2491 ) -> Result<proto::Ack> {
2492 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2493 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2494 let branch_name = envelope.payload.branch_name;
2495
2496 repository_handle
2497 .update(&mut cx, |repository_handle, _| {
2498 repository_handle.change_branch(branch_name)
2499 })
2500 .await??;
2501
2502 Ok(proto::Ack {})
2503 }
2504
2505 async fn handle_rename_branch(
2506 this: Entity<Self>,
2507 envelope: TypedEnvelope<proto::GitRenameBranch>,
2508 mut cx: AsyncApp,
2509 ) -> Result<proto::Ack> {
2510 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2511 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2512 let branch = envelope.payload.branch;
2513 let new_name = envelope.payload.new_name;
2514
2515 repository_handle
2516 .update(&mut cx, |repository_handle, _| {
2517 repository_handle.rename_branch(branch, new_name)
2518 })
2519 .await??;
2520
2521 Ok(proto::Ack {})
2522 }
2523
2524 async fn handle_create_remote(
2525 this: Entity<Self>,
2526 envelope: TypedEnvelope<proto::GitCreateRemote>,
2527 mut cx: AsyncApp,
2528 ) -> Result<proto::Ack> {
2529 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2530 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2531 let remote_name = envelope.payload.remote_name;
2532 let remote_url = envelope.payload.remote_url;
2533
2534 repository_handle
2535 .update(&mut cx, |repository_handle, _| {
2536 repository_handle.create_remote(remote_name, remote_url)
2537 })
2538 .await??;
2539
2540 Ok(proto::Ack {})
2541 }
2542
2543 async fn handle_delete_branch(
2544 this: Entity<Self>,
2545 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2546 mut cx: AsyncApp,
2547 ) -> Result<proto::Ack> {
2548 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2549 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2550 let is_remote = envelope.payload.is_remote;
2551 let branch_name = envelope.payload.branch_name;
2552
2553 repository_handle
2554 .update(&mut cx, |repository_handle, _| {
2555 repository_handle.delete_branch(is_remote, branch_name)
2556 })
2557 .await??;
2558
2559 Ok(proto::Ack {})
2560 }
2561
2562 async fn handle_remove_remote(
2563 this: Entity<Self>,
2564 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2565 mut cx: AsyncApp,
2566 ) -> Result<proto::Ack> {
2567 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2568 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2569 let remote_name = envelope.payload.remote_name;
2570
2571 repository_handle
2572 .update(&mut cx, |repository_handle, _| {
2573 repository_handle.remove_remote(remote_name)
2574 })
2575 .await??;
2576
2577 Ok(proto::Ack {})
2578 }
2579
2580 async fn handle_show(
2581 this: Entity<Self>,
2582 envelope: TypedEnvelope<proto::GitShow>,
2583 mut cx: AsyncApp,
2584 ) -> Result<proto::GitCommitDetails> {
2585 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2586 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2587
2588 let commit = repository_handle
2589 .update(&mut cx, |repository_handle, _| {
2590 repository_handle.show(envelope.payload.commit)
2591 })
2592 .await??;
2593 Ok(proto::GitCommitDetails {
2594 sha: commit.sha.into(),
2595 message: commit.message.into(),
2596 commit_timestamp: commit.commit_timestamp,
2597 author_email: commit.author_email.into(),
2598 author_name: commit.author_name.into(),
2599 })
2600 }
2601
2602 async fn handle_load_commit_diff(
2603 this: Entity<Self>,
2604 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2605 mut cx: AsyncApp,
2606 ) -> Result<proto::LoadCommitDiffResponse> {
2607 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2608 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2609
2610 let commit_diff = repository_handle
2611 .update(&mut cx, |repository_handle, _| {
2612 repository_handle.load_commit_diff(envelope.payload.commit)
2613 })
2614 .await??;
2615 Ok(proto::LoadCommitDiffResponse {
2616 files: commit_diff
2617 .files
2618 .into_iter()
2619 .map(|file| proto::CommitFile {
2620 path: file.path.to_proto(),
2621 old_text: file.old_text,
2622 new_text: file.new_text,
2623 is_binary: file.is_binary,
2624 })
2625 .collect(),
2626 })
2627 }
2628
2629 async fn handle_file_history(
2630 this: Entity<Self>,
2631 envelope: TypedEnvelope<proto::GitFileHistory>,
2632 mut cx: AsyncApp,
2633 ) -> Result<proto::GitFileHistoryResponse> {
2634 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2635 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2636 let path = RepoPath::from_proto(&envelope.payload.path)?;
2637 let skip = envelope.payload.skip as usize;
2638 let limit = envelope.payload.limit.map(|l| l as usize);
2639
2640 let file_history = repository_handle
2641 .update(&mut cx, |repository_handle, _| {
2642 repository_handle.file_history_paginated(path, skip, limit)
2643 })
2644 .await??;
2645
2646 Ok(proto::GitFileHistoryResponse {
2647 entries: file_history
2648 .entries
2649 .into_iter()
2650 .map(|entry| proto::FileHistoryEntry {
2651 sha: entry.sha.to_string(),
2652 subject: entry.subject.to_string(),
2653 message: entry.message.to_string(),
2654 commit_timestamp: entry.commit_timestamp,
2655 author_name: entry.author_name.to_string(),
2656 author_email: entry.author_email.to_string(),
2657 })
2658 .collect(),
2659 path: file_history.path.to_proto(),
2660 })
2661 }
2662
2663 async fn handle_reset(
2664 this: Entity<Self>,
2665 envelope: TypedEnvelope<proto::GitReset>,
2666 mut cx: AsyncApp,
2667 ) -> Result<proto::Ack> {
2668 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2669 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2670
2671 let mode = match envelope.payload.mode() {
2672 git_reset::ResetMode::Soft => ResetMode::Soft,
2673 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2674 };
2675
2676 repository_handle
2677 .update(&mut cx, |repository_handle, cx| {
2678 repository_handle.reset(envelope.payload.commit, mode, cx)
2679 })
2680 .await??;
2681 Ok(proto::Ack {})
2682 }
2683
2684 async fn handle_checkout_files(
2685 this: Entity<Self>,
2686 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2687 mut cx: AsyncApp,
2688 ) -> Result<proto::Ack> {
2689 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2690 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2691 let paths = envelope
2692 .payload
2693 .paths
2694 .iter()
2695 .map(|s| RepoPath::from_proto(s))
2696 .collect::<Result<Vec<_>>>()?;
2697
2698 repository_handle
2699 .update(&mut cx, |repository_handle, cx| {
2700 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2701 })
2702 .await?;
2703 Ok(proto::Ack {})
2704 }
2705
    /// Opens the repository's commit-message buffer and replicates it to the
    /// requesting peer, returning the buffer's remote id so the peer can bind
    /// its local copy to it.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                // Stream the buffer to the peer in the background; failures are
                // logged rather than failing this request. Forwarded requests
                // carry the original sender id, so replicate to that peer.
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2736
    /// Answers a credential prompt for an in-flight remote git operation.
    ///
    /// The delegate registered under `askpass_id` is *removed* from the shared
    /// map before awaiting the user's response — so the mutex guard is not
    /// held across the await — and re-inserted afterwards so that subsequent
    /// prompts for the same operation can find it again.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            // Receiving a prompt with no registered delegate indicates a logic
            // error on the sender side; panic in debug, fail the RPC in release.
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        // `None` here means the user dismissed the prompt.
        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2765
2766 async fn handle_check_for_pushed_commits(
2767 this: Entity<Self>,
2768 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2769 mut cx: AsyncApp,
2770 ) -> Result<proto::CheckForPushedCommitsResponse> {
2771 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2772 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2773
2774 let branches = repository_handle
2775 .update(&mut cx, |repository_handle, _| {
2776 repository_handle.check_for_pushed_commits()
2777 })
2778 .await??;
2779 Ok(proto::CheckForPushedCommitsResponse {
2780 pushed_to: branches
2781 .into_iter()
2782 .map(|commit| commit.to_string())
2783 .collect(),
2784 })
2785 }
2786
2787 async fn handle_git_diff(
2788 this: Entity<Self>,
2789 envelope: TypedEnvelope<proto::GitDiff>,
2790 mut cx: AsyncApp,
2791 ) -> Result<proto::GitDiffResponse> {
2792 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2793 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2794 let diff_type = match envelope.payload.diff_type() {
2795 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2796 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2797 proto::git_diff::DiffType::MergeBase => {
2798 let base_ref = envelope
2799 .payload
2800 .merge_base_ref
2801 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2802 DiffType::MergeBase {
2803 base_ref: base_ref.into(),
2804 }
2805 }
2806 };
2807
2808 let mut diff = repository_handle
2809 .update(&mut cx, |repository_handle, cx| {
2810 repository_handle.diff(diff_type, cx)
2811 })
2812 .await??;
2813 const ONE_MB: usize = 1_000_000;
2814 if diff.len() > ONE_MB {
2815 diff = diff.chars().take(ONE_MB).collect()
2816 }
2817
2818 Ok(proto::GitDiffResponse { diff })
2819 }
2820
    /// Computes the set of files that differ between two tree-ishes in the
    /// given repository, either against their merge base (`is_merge`) or as a
    /// direct `base`..`head` comparison.
    async fn handle_tree_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetTreeDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId(request.payload.repository_id);
        let diff_type = if request.payload.is_merge {
            DiffTreeType::MergeBase {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        } else {
            DiffTreeType::Since {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        };

        let diff = this
            .update(&mut cx, |this, cx| {
                // `None` (unknown repository id) is surfaced as an error below.
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
            })
            .context("missing repository")?
            .await??;

        Ok(proto::GetTreeDiffResponse {
            entries: diff
                .entries
                .into_iter()
                .map(|(path, status)| proto::TreeDiffStatus {
                    path: path.as_ref().to_proto(),
                    // First match classifies the change kind without consuming
                    // `status`, so the second match can move the old oid out.
                    status: match status {
                        TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
                        TreeDiffStatus::Modified { .. } => {
                            proto::tree_diff_status::Status::Modified.into()
                        }
                        TreeDiffStatus::Deleted { .. } => {
                            proto::tree_diff_status::Status::Deleted.into()
                        }
                    },
                    // Only deletions and modifications have a pre-change blob
                    // oid; newly added files have no prior object.
                    oid: match status {
                        TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
                            Some(old.to_string())
                        }
                        TreeDiffStatus::Added => None,
                    },
                })
                .collect(),
        })
    }
2872
2873 async fn handle_get_blob_content(
2874 this: Entity<Self>,
2875 request: TypedEnvelope<proto::GetBlobContent>,
2876 mut cx: AsyncApp,
2877 ) -> Result<proto::GetBlobContentResponse> {
2878 let oid = git::Oid::from_str(&request.payload.oid)?;
2879 let repository_id = RepositoryId(request.payload.repository_id);
2880 let content = this
2881 .update(&mut cx, |this, cx| {
2882 let repository = this.repositories().get(&repository_id)?;
2883 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2884 })
2885 .context("missing repository")?
2886 .await?;
2887 Ok(proto::GetBlobContentResponse { content })
2888 }
2889
    /// Opens the unstaged diff for a buffer on behalf of a remote peer,
    /// retains it in `shared_diffs` so later base-text updates can be
    /// forwarded to that peer, and returns the current staged base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff per requesting peer; forwarded requests identify the
        // peer via the original sender id.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2913
    /// Opens the uncommitted diff (HEAD vs. working copy, with the index as a
    /// secondary diff) for a buffer on behalf of a remote peer, retains it in
    /// `shared_diffs`, and returns the base texts the peer needs.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff per requesting peer so later updates can be relayed.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff's base text is the index (staged) snapshot,
            // when one exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            // Decide how much base text to ship: when the index snapshot is
            // the same buffer as the HEAD snapshot (equal remote ids —
            // NOTE(review): presumably the diff state reuses one snapshot when
            // contents match; confirm against the diff-state code), send only
            // the committed text and flag IndexMatchesHead.
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No HEAD base text (e.g. file not committed): send whatever
                // index snapshot exists, with no committed text.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2974
    /// Applies updated index/HEAD base texts (pushed from the remote host) to
    /// a buffer's diff state.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            // Silently ignore updates for buffers whose diff state or buffer
            // no longer exists; this is a one-way notification, not a request.
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
2993
    /// Produces git blame data for a buffer at the requested version, waiting
    /// for the local replica to catch up to that version first.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Wait until our replica has seen the requester's edits, so the blame
        // rows line up with the text the requester is looking at.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3016
3017 async fn handle_get_permalink_to_line(
3018 this: Entity<Self>,
3019 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3020 mut cx: AsyncApp,
3021 ) -> Result<proto::GetPermalinkToLineResponse> {
3022 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3023 // let version = deserialize_version(&envelope.payload.version);
3024 let selection = {
3025 let proto_selection = envelope
3026 .payload
3027 .selection
3028 .context("no selection to get permalink for defined")?;
3029 proto_selection.start as u32..proto_selection.end as u32
3030 };
3031 let buffer = this.read_with(&cx, |this, cx| {
3032 this.buffer_store.read(cx).get_existing(buffer_id)
3033 })?;
3034 let permalink = this
3035 .update(&mut cx, |this, cx| {
3036 this.get_permalink_to_line(&buffer, selection, cx)
3037 })
3038 .await?;
3039 Ok(proto::GetPermalinkToLineResponse {
3040 permalink: permalink.to_string(),
3041 })
3042 }
3043
3044 fn repository_for_request(
3045 this: &Entity<Self>,
3046 id: RepositoryId,
3047 cx: &mut AsyncApp,
3048 ) -> Result<Entity<Repository>> {
3049 this.read_with(cx, |this, _| {
3050 this.repositories
3051 .get(&id)
3052 .context("missing repository handle")
3053 .cloned()
3054 })
3055 }
3056
3057 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3058 self.repositories
3059 .iter()
3060 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3061 .collect()
3062 }
3063
    /// Maps a batch of updated worktree entries to the repositories that
    /// contain them, returning each path relative to its repository's root.
    ///
    /// The matching work runs on the background executor. Paths are sorted so
    /// that all paths under a given repository form one contiguous range, and
    /// a path lying inside nested repositories is attributed only to the
    /// innermost one.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Snapshot (work-dir, repo handle) pairs up front so the background
        // task doesn't need access to `self`.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize so entries can be compared against the repositories'
        // absolute work-directory paths.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sort repositories by work directory; the `.rev()` below then
            // visits more deeply nested (more specific) repositories first.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the index too, so duplicates can be
                        // filtered below with a simple bitmap.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3143}
3144
3145impl BufferGitState {
3146 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3147 Self {
3148 unstaged_diff: Default::default(),
3149 uncommitted_diff: Default::default(),
3150 oid_diffs: Default::default(),
3151 recalculate_diff_task: Default::default(),
3152 language: Default::default(),
3153 language_registry: Default::default(),
3154 recalculating_tx: postage::watch::channel_with(false).0,
3155 hunk_staging_operation_count: 0,
3156 hunk_staging_operation_count_as_of_write: 0,
3157 head_text: Default::default(),
3158 index_text: Default::default(),
3159 oid_texts: Default::default(),
3160 head_changed: Default::default(),
3161 index_changed: Default::default(),
3162 language_changed: Default::default(),
3163 conflict_updated_futures: Default::default(),
3164 conflict_set: Default::default(),
3165 reparse_conflict_markers_task: Default::default(),
3166 }
3167 }
3168
3169 #[ztracing::instrument(skip_all)]
3170 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3171 self.language = buffer.read(cx).language().cloned();
3172 self.language_changed = true;
3173 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3174 }
3175
    /// Re-scans the buffer for git conflict markers and updates the
    /// associated `ConflictSet` off the main thread.
    ///
    /// The returned receiver resolves once the conflict set has been updated.
    /// If there is no live conflict set, or it is not currently marked as
    /// conflicted, no work is scheduled and the sender side is dropped, so
    /// the receiver resolves with `Canceled`.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when the buffer previously contained conflicts;
        // otherwise there is no old snapshot to diff against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            // Replacing the task cancels any reparse already in flight.
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and compare on the background executor; only the
                // final snapshot swap runs on the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake every pending waiter, not just the caller that
                    // started this particular task.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3227
3228 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3229 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3230 }
3231
3232 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3233 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3234 }
3235
3236 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3237 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3238 }
3239
3240 fn handle_base_texts_updated(
3241 &mut self,
3242 buffer: text::BufferSnapshot,
3243 message: proto::UpdateDiffBases,
3244 cx: &mut Context<Self>,
3245 ) {
3246 use proto::update_diff_bases::Mode;
3247
3248 let Some(mode) = Mode::from_i32(message.mode) else {
3249 return;
3250 };
3251
3252 let diff_bases_change = match mode {
3253 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3254 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3255 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3256 Mode::IndexAndHead => DiffBasesChange::SetEach {
3257 index: message.staged_text,
3258 head: message.committed_text,
3259 },
3260 };
3261
3262 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3263 }
3264
3265 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3266 if *self.recalculating_tx.borrow() {
3267 let mut rx = self.recalculating_tx.subscribe();
3268 Some(async move {
3269 loop {
3270 let is_recalculating = rx.recv().await;
3271 if is_recalculating != Some(true) {
3272 break;
3273 }
3274 }
3275 })
3276 } else {
3277 None
3278 }
3279 }
3280
3281 fn diff_bases_changed(
3282 &mut self,
3283 buffer: text::BufferSnapshot,
3284 diff_bases_change: Option<DiffBasesChange>,
3285 cx: &mut Context<Self>,
3286 ) {
3287 match diff_bases_change {
3288 Some(DiffBasesChange::SetIndex(index)) => {
3289 self.index_text = index.map(|mut index| {
3290 text::LineEnding::normalize(&mut index);
3291 Arc::from(index.as_str())
3292 });
3293 self.index_changed = true;
3294 }
3295 Some(DiffBasesChange::SetHead(head)) => {
3296 self.head_text = head.map(|mut head| {
3297 text::LineEnding::normalize(&mut head);
3298 Arc::from(head.as_str())
3299 });
3300 self.head_changed = true;
3301 }
3302 Some(DiffBasesChange::SetBoth(text)) => {
3303 let text = text.map(|mut text| {
3304 text::LineEnding::normalize(&mut text);
3305 Arc::from(text.as_str())
3306 });
3307 self.head_text = text.clone();
3308 self.index_text = text;
3309 self.head_changed = true;
3310 self.index_changed = true;
3311 }
3312 Some(DiffBasesChange::SetEach { index, head }) => {
3313 self.index_text = index.map(|mut index| {
3314 text::LineEnding::normalize(&mut index);
3315 Arc::from(index.as_str())
3316 });
3317 self.index_changed = true;
3318 self.head_text = head.map(|mut head| {
3319 text::LineEnding::normalize(&mut head);
3320 Arc::from(head.as_str())
3321 });
3322 self.head_changed = true;
3323 }
3324 None => {}
3325 }
3326
3327 self.recalculate_diffs(buffer, cx)
3328 }
3329
    /// Asynchronously recomputes the unstaged, uncommitted, and per-oid diffs
    /// for `buffer` against the currently stored base texts.
    ///
    /// Publishes `true` on `recalculating_tx` for the duration of the work so
    /// `wait_for_recalculation` can block until the diffs settle. The task is
    /// abandoned partway through if new hunk staging operations arrive while
    /// it runs, leaving a later recalculation to pick up the settled state.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality works here because `diff_bases_changed` stores the
        // same `Arc` in both fields for the index-matches-head case.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        // Snapshot the live per-commit diffs along with their base texts.
        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop entries whose diff handle has been released, along with their
        // cached base texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            // NOTE(review): `then_some(false)` here vs.
                            // `then_some(true)` for the uncommitted diff below
                            // — confirm the flag's meaning in `update_diff`.
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When the index matches HEAD, reuse the unstaged diff rather
                // than computing the same diff twice.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            // Publish the new unstaged snapshot, remembering which range
            // changed so the uncommitted diff can reuse it below.
            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Finally, refresh each live per-commit (oid) diff.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and signal that recalculation finished.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
3523}
3524
/// Builds an `AskPassDelegate` that forwards git credential prompts to the
/// downstream client over RPC, returning the encrypted response to the
/// caller's channel.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // No downstream client means nobody can answer the prompt;
            // dropping `tx` cancels it.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext secret from memory once it has been
                // handed off in encrypted form.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3554
3555impl RepositoryId {
3556 pub fn to_proto(self) -> u64 {
3557 self.0
3558 }
3559
3560 pub fn from_proto(id: u64) -> Self {
3561 RepositoryId(id)
3562 }
3563}
3564
3565impl RepositorySnapshot {
3566 fn empty(
3567 id: RepositoryId,
3568 work_directory_abs_path: Arc<Path>,
3569 original_repo_abs_path: Option<Arc<Path>>,
3570 path_style: PathStyle,
3571 ) -> Self {
3572 Self {
3573 id,
3574 statuses_by_path: Default::default(),
3575 original_repo_abs_path: original_repo_abs_path
3576 .unwrap_or_else(|| work_directory_abs_path.clone()),
3577 work_directory_abs_path,
3578 branch: None,
3579 head_commit: None,
3580 scan_id: 0,
3581 merge: Default::default(),
3582 remote_origin_url: None,
3583 remote_upstream_url: None,
3584 stash_entries: Default::default(),
3585 linked_worktrees: Arc::from([]),
3586 path_style,
3587 }
3588 }
3589
    /// Builds the first `UpdateRepository` message sent to a downstream
    /// client: every current status entry is included and nothing is
    /// reported as removed.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            // The repository's own id doubles as its entry id in the initial
            // message (unlike `build_update`, which sends no entry ids).
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3631
    /// Builds an incremental `UpdateRepository` message by diffing this
    /// snapshot's statuses against `old`.
    ///
    /// Uses a two-pointer merge walk over both status sequences (compared by
    /// `repo_path`): entries only in `self` are reported as updated, entries
    /// only in `old` as removed, and entries present in both are reported as
    /// updated only when their status or diff stat changed.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Path exists only in the new snapshot.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Same path in both; only send it if something
                            // observable changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Path exists only in the old snapshot.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3712
    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    pub fn is_main_worktree(&self) -> bool {
        // A linked worktree's working directory differs from the original
        // checkout it was created from.
        self.work_directory_abs_path == self.original_repo_abs_path
    }

    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }

    /// The git worktrees recorded as linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }
3735
    /// Iterates over all file status entries in this snapshot.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// Aggregated git status summary over all entries.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// Looks up the status entry for a repo-relative path, if any.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// Looks up the diff stat recorded for a repo-relative path, if any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }
3755
    /// Converts an absolute path into a path relative to this repository's
    /// work directory; `None` if the path lies outside it.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Converts a repo-relative path back into an absolute path under the
    /// work directory.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            // NOTE(review): assumes joining a repo-relative path onto the
            // work directory cannot fail — confirm `PathStyle::join`'s
            // failure conditions.
            .unwrap()
            .into()
    }

    /// Shared implementation of [`Self::abs_path_to_repo_path`] that doesn't
    /// need a full snapshot — used from background tasks.
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }
3776
    /// Whether this path was recorded as conflicted the last time the merge
    /// heads changed (see `MergeDetails::update`).
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }
3782
3783 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3784 let had_conflict_on_last_merge_head_change = self
3785 .merge
3786 .merge_heads_by_conflicted_path
3787 .contains_key(repo_path);
3788 let has_conflict_currently = self
3789 .status_for_path(repo_path)
3790 .is_some_and(|entry| entry.status.is_conflicted());
3791 had_conflict_on_last_merge_head_change || has_conflict_currently
3792 }
3793
3794 /// This is the name that will be displayed in the repository selector for this repository.
3795 pub fn display_name(&self) -> SharedString {
3796 self.work_directory_abs_path
3797 .file_name()
3798 .unwrap_or_default()
3799 .to_string_lossy()
3800 .to_string()
3801 .into()
3802 }
3803}
3804
3805pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3806 proto::StashEntry {
3807 oid: entry.oid.as_bytes().to_vec(),
3808 message: entry.message.clone(),
3809 branch: entry.branch.clone(),
3810 index: entry.index as u64,
3811 timestamp: entry.timestamp,
3812 }
3813}
3814
3815pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3816 Ok(StashEntry {
3817 oid: Oid::from_bytes(&entry.oid)?,
3818 message: entry.message.clone(),
3819 index: entry.index as usize,
3820 branch: entry.branch.clone(),
3821 timestamp: entry.timestamp,
3822 })
3823}
3824
3825impl MergeDetails {
3826 async fn update(
3827 &mut self,
3828 backend: &Arc<dyn GitRepository>,
3829 current_conflicted_paths: Vec<RepoPath>,
3830 ) -> Result<bool> {
3831 log::debug!("load merge details");
3832 self.message = backend.merge_message().await.map(SharedString::from);
3833 let heads = backend
3834 .revparse_batch(vec![
3835 "MERGE_HEAD".into(),
3836 "CHERRY_PICK_HEAD".into(),
3837 "REBASE_HEAD".into(),
3838 "REVERT_HEAD".into(),
3839 "APPLY_HEAD".into(),
3840 ])
3841 .await
3842 .log_err()
3843 .unwrap_or_default()
3844 .into_iter()
3845 .map(|opt| opt.map(SharedString::from))
3846 .collect::<Vec<_>>();
3847
3848 let mut conflicts_changed = false;
3849
3850 // Record the merge state for newly conflicted paths
3851 for path in ¤t_conflicted_paths {
3852 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3853 conflicts_changed = true;
3854 self.merge_heads_by_conflicted_path
3855 .insert(path.clone(), heads.clone());
3856 }
3857 }
3858
3859 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3860 self.merge_heads_by_conflicted_path
3861 .retain(|path, old_merge_heads| {
3862 let keep = current_conflicted_paths.contains(path)
3863 || (old_merge_heads == &heads
3864 && old_merge_heads.iter().any(|head| head.is_some()));
3865 if !keep {
3866 conflicts_changed = true;
3867 }
3868 keep
3869 });
3870
3871 Ok(conflicts_changed)
3872 }
3873}
3874
3875impl Repository {
3876 pub fn is_trusted(&self) -> bool {
3877 match self.repository_state.peek() {
3878 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3879 _ => false,
3880 }
3881 }
3882
    /// Returns a clone of the repository's current snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }

    /// Iterates over all pending stage/unstage operations.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }

    /// Aggregated summary over all pending operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }

    /// Looks up the pending operations for a repo-relative path, if any.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
3900
    /// Creates a `Repository` backed by a local git checkout.
    ///
    /// The backend is opened asynchronously; the state is wrapped in shared
    /// futures so the git worker and every queued job can await the same
    /// initialization.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // Initialization errors are converted to strings before sharing, so
        // the shared future's output can be cloned by multiple awaiters.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        // Re-wrap the local state into the general `RepositoryState` enum for
        // consumers that don't care whether the repository is local or remote.
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data whenever the branch changes
        // after the initial scan.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3968
    /// Creates a `Repository` whose git operations are executed by a remote
    /// host over the given RPC client.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike the local case, the remote state is available immediately, so
        // the shared state future is already resolved.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4006
    /// Upgrades the weak handle to the owning `GitStore`, if it still exists.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4010
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, applies any resulting diff-base changes, and
    /// forwards them to the downstream client, if one is connected.
    ///
    /// Scheduled under `GitJobKey::ReloadBufferDiffBases` — presumably so
    /// concurrent reload requests coalesce; confirm in `send_keyed_job`.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (main thread): collect each open buffer whose diff
                // state belongs to this repository, together with its current
                // base texts (only for the diff kinds still alive).
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Phase 2 (background): load fresh index/HEAD texts from the
                // backend and compute which bases actually changed.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // Collapse the four (tracked index, tracked head)
                            // combinations into the minimal change message.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (main thread): forward each change downstream, then
                // apply it locally, which triggers diff recalculation.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4170
    /// Enqueues `job` on this repository's job channel with no [`GitJobKey`],
    /// returning a oneshot receiver for the job's result.
    ///
    /// `status`, if provided, is recorded in `active_jobs` for the duration of
    /// the job so the UI can surface it. Thin wrapper over
    /// `send_keyed_job(None, ...)`.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4183
4184 fn send_keyed_job<F, Fut, R>(
4185 &mut self,
4186 key: Option<GitJobKey>,
4187 status: Option<SharedString>,
4188 job: F,
4189 ) -> oneshot::Receiver<R>
4190 where
4191 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
4192 Fut: Future<Output = R> + 'static,
4193 R: Send + 'static,
4194 {
4195 let (result_tx, result_rx) = futures::channel::oneshot::channel();
4196 let job_id = post_inc(&mut self.job_id);
4197 let this = self.this.clone();
4198 self.job_sender
4199 .unbounded_send(GitJob {
4200 key,
4201 job: Box::new(move |state, cx: &mut AsyncApp| {
4202 let job = job(state, cx.clone());
4203 cx.spawn(async move |cx| {
4204 if let Some(s) = status.clone() {
4205 this.update(cx, |this, cx| {
4206 this.active_jobs.insert(
4207 job_id,
4208 JobInfo {
4209 start: Instant::now(),
4210 message: s.clone(),
4211 },
4212 );
4213
4214 cx.notify();
4215 })
4216 .ok();
4217 }
4218 let result = job.await;
4219
4220 this.update(cx, |this, cx| {
4221 this.active_jobs.remove(&job_id);
4222 cx.notify();
4223 })
4224 .ok();
4225
4226 result_tx.send(result).ok();
4227 })
4228 }),
4229 })
4230 .ok();
4231 result_rx
4232 }
4233
4234 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4235 let Some(git_store) = self.git_store.upgrade() else {
4236 return;
4237 };
4238 let entity = cx.entity();
4239 git_store.update(cx, |git_store, cx| {
4240 let Some((&id, _)) = git_store
4241 .repositories
4242 .iter()
4243 .find(|(_, handle)| *handle == &entity)
4244 else {
4245 return;
4246 };
4247 git_store.active_repo_id = Some(id);
4248 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4249 });
4250 }
4251
    /// Iterates the file status entries from the cached snapshot, without
    /// querying the underlying git repository.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4255
    /// Returns the cached diff statistics for `path`, if the snapshot has any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4259
    /// Returns a clone of the stash entries recorded in the cached snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4263
4264 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4265 let git_store = self.git_store.upgrade()?;
4266 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4267 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4268 let abs_path = SanitizedPath::new(&abs_path);
4269 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4270 Some(ProjectPath {
4271 worktree_id: worktree.read(cx).id(),
4272 path: relative_path,
4273 })
4274 }
4275
4276 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4277 let git_store = self.git_store.upgrade()?;
4278 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4279 let abs_path = worktree_store.absolutize(path, cx)?;
4280 self.snapshot.abs_path_to_repo_path(&abs_path)
4281 }
4282
4283 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4284 other
4285 .read(cx)
4286 .snapshot
4287 .work_directory_abs_path
4288 .starts_with(&self.snapshot.work_directory_abs_path)
4289 }
4290
    /// Returns the buffer used to edit this repository's commit message,
    /// creating it on first use.
    ///
    /// The buffer is cached in `commit_message_buffer`, so repeated calls
    /// return the same entity. For local repositories a buffer is created via
    /// [`Self::open_local_commit_buffer`]; for remote repositories it is
    /// opened on the host over RPC and awaited through the buffer store, with
    /// the "Git Commit" language applied when a registry is available.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the previously created buffer.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Ask the host to open (or create) its commit buffer, then
                    // wait for the replica to arrive locally.
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache the buffer so subsequent calls take the fast path.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4343
4344 fn open_local_commit_buffer(
4345 language_registry: Option<Arc<LanguageRegistry>>,
4346 buffer_store: Entity<BufferStore>,
4347 cx: &mut Context<Self>,
4348 ) -> Task<Result<Entity<Buffer>>> {
4349 cx.spawn(async move |repository, cx| {
4350 let git_commit_language = match language_registry {
4351 Some(language_registry) => {
4352 Some(language_registry.language_for_name("Git Commit").await?)
4353 }
4354 None => None,
4355 };
4356 let buffer = buffer_store
4357 .update(cx, |buffer_store, cx| {
4358 buffer_store.create_buffer(git_commit_language, false, cx)
4359 })
4360 .await?;
4361
4362 repository.update(cx, |repository, _| {
4363 repository.commit_message_buffer = Some(buffer.clone());
4364 })?;
4365 Ok(buffer)
4366 })
4367 }
4368
    /// Restores the given `paths` to their contents at `commit`, tracking the
    /// affected entries as pending `Reverted` operations while the job runs.
    ///
    /// Local repositories run the checkout via the backend; remote ones
    /// forward a `GitCheckoutFiles` request to the host.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4423
    /// Resets the repository to `commit` with the given `reset_mode`.
    ///
    /// Local repositories run the reset via the backend; remote ones forward
    /// a `GitReset` request to the host, mapping the mode into its proto
    /// representation.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4457
    /// Loads the details of `commit` (sha, message, author, timestamp).
    ///
    /// Local repositories query the backend directly; remote ones send a
    /// `GitShow` request and rebuild `CommitDetails` from the response.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4485
    /// Loads the diff introduced by `commit`, with per-file old/new text.
    ///
    /// Local repositories load it via the backend; remote ones send a
    /// `LoadCommitDiff` request and reconstruct the `CommitDiff` from the
    /// response, failing if any file path fails to decode.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4521
    /// Loads the full commit history for `path`: no entries skipped, no limit
    /// (see [`Self::file_history_paginated`]).
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4528
    /// Loads a page of the commit history touching `path`, skipping the first
    /// `skip` entries and returning at most `limit` (all remaining if `None`).
    ///
    /// Local repositories query the backend; remote ones send a
    /// `GitFileHistory` request and rebuild the `FileHistory` from the proto
    /// response.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4570
    /// Returns the cached initial graph data for `(log_source, log_order)`,
    /// if it has been fetched (see [`Self::graph_data`], which populates it).
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4578
4579 pub fn search_commits(
4580 &mut self,
4581 log_source: LogSource,
4582 search_args: SearchCommitArgs,
4583 request_tx: smol::channel::Sender<Oid>,
4584 cx: &mut Context<Self>,
4585 ) {
4586 let repository_state = self.repository_state.clone();
4587
4588 cx.background_spawn(async move {
4589 let repo_state = repository_state.await;
4590
4591 match repo_state {
4592 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4593 backend
4594 .search_commits(log_source, search_args, request_tx)
4595 .await
4596 .log_err();
4597 }
4598 Ok(RepositoryState::Remote(_)) => {}
4599 Err(_) => {}
4600 };
4601 })
4602 .detach();
4603 }
4604
    /// Returns the commits in `range` for the git graph, kicking off a fetch
    /// for `(log_source, log_order)` on first request.
    ///
    /// The first call for a given key spawns a task that streams graph data
    /// into `initial_graph_data` (local repositories only; remote and failed
    /// states record an error instead). Subsequent calls slice whatever data
    /// has arrived so far; `is_loading` reflects whether the fetch task is
    /// still running. The requested range is clamped to the available data.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // First request for this key: spawn the fetch. On failure,
                // stash the error on the entry so the UI can display it.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the range so slicing can't go out of bounds while data is
        // still streaming in.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4674
    /// Streams initial commit-graph data for a local repository into the
    /// cached `initial_graph_data` entry for `(log_source, log_order)`.
    ///
    /// The backend walks the graph on a background task and sends batches of
    /// commits through a channel; each batch is appended on the foreground,
    /// updating the oid→index map and emitting `GitGraphEvent::CountUpdated`
    /// per commit. Returns the backend task's result once the stream ends.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // Run the graph walk off the main thread; it feeds `request_tx`.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            // Record the index before pushing, so the map
                            // points at the entry about to be appended.
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                // The entry must already exist: `graph_data` created it before
                // spawning this task, and dropping the entry drops the task.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4729
    /// Returns the cached data for commit `sha`, requesting it from the
    /// background commit-data handler if it isn't loaded yet.
    ///
    /// Returns `CommitDataState::Loading` until the handler delivers the data
    /// (it updates `commit_data` and notifies when it does). If the handler
    /// is closed, it is restarted and the caller should retry after the next
    /// notify; if it is still starting, nothing is enqueued yet.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark the commit as loading if the request was
                    // actually delivered to the handler.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4750
    /// Spawns the handler that services commit-data requests for the graph.
    ///
    /// Two tasks cooperate: a background task owns the backend's commit-data
    /// reader and answers requests from `commit_data_request`, and a
    /// foreground task writes results into `commit_data` and notifies
    /// observers. The background task exits after ten seconds without a
    /// request; dropping its result sender ends the foreground task, which
    /// then marks the handler `Closed` so it can be restarted on demand.
    /// Only supported for local repositories.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            // Drain results as they arrive, caching them on the repository.
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Result channel closed (or entity dropped): mark the handler
            // closed so a future request restarts it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: shut down if no request arrives in time.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Failed reads are logged and skipped; no
                                // result is sent for this sha.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel ends the foreground task's loop.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4841
    /// Returns the project's buffer store, or `None` if the owning `GitStore`
    /// has been dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4845
    /// Kicks off saves for open buffers corresponding to `entries`, returning
    /// the save tasks.
    ///
    /// Only buffers whose files still exist on disk and which have unsaved
    /// edits are saved; callers await these tasks before running git
    /// operations that read the working tree.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    // Skip entries that don't map into any project worktree.
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
4872
    /// Stages the given paths (see [`Self::stage_or_unstage_entries`]).
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4880
    /// Unstages the given paths (see [`Self::stage_or_unstage_entries`]).
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4888
    /// Stages (`stage == true`) or unstages the given paths, keeping buffer
    /// diffs in sync with the operation.
    ///
    /// Steps: dirty buffers for the affected paths are saved first; then a
    /// keyed index-write job runs. Before touching the index, every affected
    /// buffer's uncommitted diff optimistically stages/unstages all hunks and
    /// bumps its `hunk_staging_operation_count`. After the git operation, on
    /// success the recorded counts are committed as
    /// `hunk_staging_operation_count_as_of_write`; on failure the pending
    /// hunks are cleared so the optimistic state is rolled back. The pending
    /// entries are tracked as `Staged`/`Unstaged` operations while running.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        // Nothing to do for an empty set or if the git store is gone.
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status shown while the job runs.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush unsaved edits so git sees the latest contents.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip all hunks in each affected
                            // buffer's uncommitted diff, remembering the
                            // per-diff operation count for reconciliation.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write, locally or via RPC.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic hunk state with the
                            // outcome: commit the counts on success, clear
                            // pending hunks on failure.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5073
5074 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5075 let snapshot = self.snapshot.clone();
5076 let pending_ops = self.pending_ops.clone();
5077 let to_stage = cx.background_spawn(async move {
5078 snapshot
5079 .status()
5080 .filter_map(|entry| {
5081 if let Some(ops) =
5082 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5083 {
5084 if ops.staging() || ops.staged() {
5085 None
5086 } else {
5087 Some(entry.repo_path)
5088 }
5089 } else if entry.status.staging().is_fully_staged() {
5090 None
5091 } else {
5092 Some(entry.repo_path)
5093 }
5094 })
5095 .collect()
5096 });
5097
5098 cx.spawn(async move |this, cx| {
5099 let to_stage = to_stage.await;
5100 this.update(cx, |this, cx| {
5101 this.stage_or_unstage_entries(true, to_stage, cx)
5102 })?
5103 .await
5104 })
5105 }
5106
5107 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5108 let snapshot = self.snapshot.clone();
5109 let pending_ops = self.pending_ops.clone();
5110 let to_unstage = cx.background_spawn(async move {
5111 snapshot
5112 .status()
5113 .filter_map(|entry| {
5114 if let Some(ops) =
5115 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5116 {
5117 if !ops.staging() && !ops.staged() {
5118 None
5119 } else {
5120 Some(entry.repo_path)
5121 }
5122 } else if entry.status.staging().is_fully_unstaged() {
5123 None
5124 } else {
5125 Some(entry.repo_path)
5126 }
5127 })
5128 .collect()
5129 });
5130
5131 cx.spawn(async move |this, cx| {
5132 let to_unstage = to_unstage.await;
5133 this.update(cx, |this, cx| {
5134 this.stage_or_unstage_entries(false, to_unstage, cx)
5135 })?
5136 .await
5137 })
5138 }
5139
    /// Stashes every path in the cached status (see [`Self::stash_entries`]).
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5145
    /// Stashes the given paths.
    ///
    /// Local repositories stash via the backend; remote ones forward a
    /// `Stash` request to the host.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5182
    /// Pops the stash entry at `index` (the latest entry when `None`).
    ///
    /// Local repositories pop via the backend; remote ones forward a
    /// `StashPop` request to the host.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5216
    /// Applies the stash entry at `index` (the latest entry when `None`)
    /// without removing it from the stash.
    ///
    /// Local repositories apply via the backend; remote ones forward a
    /// `StashApply` request to the host.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5250
5251 pub fn stash_drop(
5252 &mut self,
5253 index: Option<usize>,
5254 cx: &mut Context<Self>,
5255 ) -> oneshot::Receiver<anyhow::Result<()>> {
5256 let id = self.id;
5257 let updates_tx = self
5258 .git_store()
5259 .and_then(|git_store| match &git_store.read(cx).state {
5260 GitStoreState::Local { downstream, .. } => downstream
5261 .as_ref()
5262 .map(|downstream| downstream.updates_tx.clone()),
5263 _ => None,
5264 });
5265 let this = cx.weak_entity();
5266 self.send_job(None, move |git_repo, mut cx| async move {
5267 match git_repo {
5268 RepositoryState::Local(LocalRepositoryState {
5269 backend,
5270 environment,
5271 ..
5272 }) => {
5273 // TODO would be nice to not have to do this manually
5274 let result = backend.stash_drop(index, environment).await;
5275 if result.is_ok()
5276 && let Ok(stash_entries) = backend.stash_entries().await
5277 {
5278 let snapshot = this.update(&mut cx, |this, cx| {
5279 this.snapshot.stash_entries = stash_entries;
5280 cx.emit(RepositoryEvent::StashEntriesChanged);
5281 this.snapshot.clone()
5282 })?;
5283 if let Some(updates_tx) = updates_tx {
5284 updates_tx
5285 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5286 .ok();
5287 }
5288 }
5289
5290 result
5291 }
5292 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5293 client
5294 .request(proto::StashDrop {
5295 project_id: project_id.0,
5296 repository_id: id.to_proto(),
5297 stash_index: index.map(|i| i as u64),
5298 })
5299 .await
5300 .context("sending stash pop request")?;
5301 Ok(())
5302 }
5303 }
5304 })
5305 }
5306
    /// Runs the given git hook, reporting `git hook <name>` as the job status.
    ///
    /// Local repositories run the hook via the backend with the project
    /// environment; remote ones forward a `RunGitHook` request to the host.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5333
    /// Creates a commit with the given message, author override, and options.
    ///
    /// The pre-commit hook is enqueued first and the commit job awaits its
    /// result before committing. For remote repositories, the askpass
    /// delegate is registered under a fresh id so the host can proxy
    /// credential prompts back to this client; it is deregistered when the
    /// request finishes.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Enqueue the pre-commit hook; the commit below waits on `rx`.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the pre-commit hook failed (or its job was dropped).
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate when the request completes,
                    // whatever the outcome.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5388
5389 pub fn fetch(
5390 &mut self,
5391 fetch_options: FetchOptions,
5392 askpass: AskPassDelegate,
5393 _cx: &mut App,
5394 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5395 let askpass_delegates = self.askpass_delegates.clone();
5396 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5397 let id = self.id;
5398
5399 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
5400 match git_repo {
5401 RepositoryState::Local(LocalRepositoryState {
5402 backend,
5403 environment,
5404 ..
5405 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
5406 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5407 askpass_delegates.lock().insert(askpass_id, askpass);
5408 let _defer = util::defer(|| {
5409 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5410 debug_assert!(askpass_delegate.is_some());
5411 });
5412
5413 let response = client
5414 .request(proto::Fetch {
5415 project_id: project_id.0,
5416 repository_id: id.to_proto(),
5417 askpass_id,
5418 remote: fetch_options.to_proto(),
5419 })
5420 .await?;
5421
5422 Ok(RemoteCommandOutput {
5423 stdout: response.stdout,
5424 stderr: response.stderr,
5425 })
5426 }
5427 }
5428 })
5429 }
5430
5431 pub fn push(
5432 &mut self,
5433 branch: SharedString,
5434 remote_branch: SharedString,
5435 remote: SharedString,
5436 options: Option<PushOptions>,
5437 askpass: AskPassDelegate,
5438 cx: &mut Context<Self>,
5439 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5440 let askpass_delegates = self.askpass_delegates.clone();
5441 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5442 let id = self.id;
5443
5444 let args = options
5445 .map(|option| match option {
5446 PushOptions::SetUpstream => " --set-upstream",
5447 PushOptions::Force => " --force-with-lease",
5448 })
5449 .unwrap_or("");
5450
5451 let updates_tx = self
5452 .git_store()
5453 .and_then(|git_store| match &git_store.read(cx).state {
5454 GitStoreState::Local { downstream, .. } => downstream
5455 .as_ref()
5456 .map(|downstream| downstream.updates_tx.clone()),
5457 _ => None,
5458 });
5459
5460 let this = cx.weak_entity();
5461 self.send_job(
5462 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5463 move |git_repo, mut cx| async move {
5464 match git_repo {
5465 RepositoryState::Local(LocalRepositoryState {
5466 backend,
5467 environment,
5468 ..
5469 }) => {
5470 let result = backend
5471 .push(
5472 branch.to_string(),
5473 remote_branch.to_string(),
5474 remote.to_string(),
5475 options,
5476 askpass,
5477 environment.clone(),
5478 cx.clone(),
5479 )
5480 .await;
5481 // TODO would be nice to not have to do this manually
5482 if result.is_ok() {
5483 let branches = backend.branches().await?;
5484 let branch = branches.into_iter().find(|branch| branch.is_head);
5485 log::info!("head branch after scan is {branch:?}");
5486 let snapshot = this.update(&mut cx, |this, cx| {
5487 this.snapshot.branch = branch;
5488 cx.emit(RepositoryEvent::BranchChanged);
5489 this.snapshot.clone()
5490 })?;
5491 if let Some(updates_tx) = updates_tx {
5492 updates_tx
5493 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5494 .ok();
5495 }
5496 }
5497 result
5498 }
5499 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5500 askpass_delegates.lock().insert(askpass_id, askpass);
5501 let _defer = util::defer(|| {
5502 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5503 debug_assert!(askpass_delegate.is_some());
5504 });
5505 let response = client
5506 .request(proto::Push {
5507 project_id: project_id.0,
5508 repository_id: id.to_proto(),
5509 askpass_id,
5510 branch_name: branch.to_string(),
5511 remote_branch_name: remote_branch.to_string(),
5512 remote_name: remote.to_string(),
5513 options: options.map(|options| match options {
5514 PushOptions::Force => proto::push::PushOptions::Force,
5515 PushOptions::SetUpstream => {
5516 proto::push::PushOptions::SetUpstream
5517 }
5518 }
5519 as i32),
5520 })
5521 .await?;
5522
5523 Ok(RemoteCommandOutput {
5524 stdout: response.stdout,
5525 stderr: response.stderr,
5526 })
5527 }
5528 }
5529 },
5530 )
5531 }
5532
5533 pub fn pull(
5534 &mut self,
5535 branch: Option<SharedString>,
5536 remote: SharedString,
5537 rebase: bool,
5538 askpass: AskPassDelegate,
5539 _cx: &mut App,
5540 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5541 let askpass_delegates = self.askpass_delegates.clone();
5542 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5543 let id = self.id;
5544
5545 let mut status = "git pull".to_string();
5546 if rebase {
5547 status.push_str(" --rebase");
5548 }
5549 status.push_str(&format!(" {}", remote));
5550 if let Some(b) = &branch {
5551 status.push_str(&format!(" {}", b));
5552 }
5553
5554 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5555 match git_repo {
5556 RepositoryState::Local(LocalRepositoryState {
5557 backend,
5558 environment,
5559 ..
5560 }) => {
5561 backend
5562 .pull(
5563 branch.as_ref().map(|b| b.to_string()),
5564 remote.to_string(),
5565 rebase,
5566 askpass,
5567 environment.clone(),
5568 cx,
5569 )
5570 .await
5571 }
5572 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5573 askpass_delegates.lock().insert(askpass_id, askpass);
5574 let _defer = util::defer(|| {
5575 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5576 debug_assert!(askpass_delegate.is_some());
5577 });
5578 let response = client
5579 .request(proto::Pull {
5580 project_id: project_id.0,
5581 repository_id: id.to_proto(),
5582 askpass_id,
5583 rebase,
5584 branch_name: branch.as_ref().map(|b| b.to_string()),
5585 remote_name: remote.to_string(),
5586 })
5587 .await?;
5588
5589 Ok(RemoteCommandOutput {
5590 stdout: response.stdout,
5591 stderr: response.stderr,
5592 })
5593 }
5594 }
5595 })
5596 }
5597
    /// Schedules a job that writes `content` into the git index entry for
    /// `path` (`None` removes the entry from the index).
    ///
    /// Jobs are keyed on the path (`GitJobKey::WriteIndex`), so a
    /// queued-but-not-started write for the same path is superseded by a newer
    /// one. When `hunk_staging_operation_count` is provided, it is recorded on
    /// the buffer's diff state after the write completes so later index
    /// updates can be attributed to this staging operation.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the file's executable bit in the index;
                        // missing or unreadable files count as non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    // Record the operation count on the buffer's diff state so
                    // the diff can tell which index writes it has already seen.
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5676
5677 pub fn create_remote(
5678 &mut self,
5679 remote_name: String,
5680 remote_url: String,
5681 ) -> oneshot::Receiver<Result<()>> {
5682 let id = self.id;
5683 self.send_job(
5684 Some(format!("git remote add {remote_name} {remote_url}").into()),
5685 move |repo, _cx| async move {
5686 match repo {
5687 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5688 backend.create_remote(remote_name, remote_url).await
5689 }
5690 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5691 client
5692 .request(proto::GitCreateRemote {
5693 project_id: project_id.0,
5694 repository_id: id.to_proto(),
5695 remote_name,
5696 remote_url,
5697 })
5698 .await?;
5699
5700 Ok(())
5701 }
5702 }
5703 },
5704 )
5705 }
5706
5707 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5708 let id = self.id;
5709 self.send_job(
5710 Some(format!("git remove remote {remote_name}").into()),
5711 move |repo, _cx| async move {
5712 match repo {
5713 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5714 backend.remove_remote(remote_name).await
5715 }
5716 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5717 client
5718 .request(proto::GitRemoveRemote {
5719 project_id: project_id.0,
5720 repository_id: id.to_proto(),
5721 remote_name,
5722 })
5723 .await?;
5724
5725 Ok(())
5726 }
5727 }
5728 },
5729 )
5730 }
5731
5732 pub fn get_remotes(
5733 &mut self,
5734 branch_name: Option<String>,
5735 is_push: bool,
5736 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5737 let id = self.id;
5738 self.send_job(None, move |repo, _cx| async move {
5739 match repo {
5740 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5741 let remote = if let Some(branch_name) = branch_name {
5742 if is_push {
5743 backend.get_push_remote(branch_name).await?
5744 } else {
5745 backend.get_branch_remote(branch_name).await?
5746 }
5747 } else {
5748 None
5749 };
5750
5751 match remote {
5752 Some(remote) => Ok(vec![remote]),
5753 None => backend.get_all_remotes().await,
5754 }
5755 }
5756 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5757 let response = client
5758 .request(proto::GetRemotes {
5759 project_id: project_id.0,
5760 repository_id: id.to_proto(),
5761 branch_name,
5762 is_push,
5763 })
5764 .await?;
5765
5766 let remotes = response
5767 .remotes
5768 .into_iter()
5769 .map(|remotes| Remote {
5770 name: remotes.name.into(),
5771 })
5772 .collect();
5773
5774 Ok(remotes)
5775 }
5776 }
5777 })
5778 }
5779
5780 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5781 let id = self.id;
5782 self.send_job(None, move |repo, _| async move {
5783 match repo {
5784 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5785 backend.branches().await
5786 }
5787 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5788 let response = client
5789 .request(proto::GitGetBranches {
5790 project_id: project_id.0,
5791 repository_id: id.to_proto(),
5792 })
5793 .await?;
5794
5795 let branches = response
5796 .branches
5797 .into_iter()
5798 .map(|branch| proto_to_branch(&branch))
5799 .collect();
5800
5801 Ok(branches)
5802 }
5803 }
5804 })
5805 }
5806
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree's working directory differs from the repository's
        // original (main) checkout path.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5818
5819 pub fn path_for_new_linked_worktree(
5820 &self,
5821 branch_name: &str,
5822 worktree_directory_setting: &str,
5823 ) -> Result<PathBuf> {
5824 let original_repo = self.original_repo_abs_path.clone();
5825 let project_name = original_repo
5826 .file_name()
5827 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5828 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5829 Ok(directory.join(branch_name).join(project_name))
5830 }
5831
5832 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5833 let id = self.id;
5834 self.send_job(None, move |repo, _| async move {
5835 match repo {
5836 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5837 backend.worktrees().await
5838 }
5839 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5840 let response = client
5841 .request(proto::GitGetWorktrees {
5842 project_id: project_id.0,
5843 repository_id: id.to_proto(),
5844 })
5845 .await?;
5846
5847 let worktrees = response
5848 .worktrees
5849 .into_iter()
5850 .map(|worktree| proto_to_worktree(&worktree))
5851 .collect();
5852
5853 Ok(worktrees)
5854 }
5855 }
5856 })
5857 }
5858
5859 pub fn create_worktree(
5860 &mut self,
5861 branch_name: String,
5862 path: PathBuf,
5863 commit: Option<String>,
5864 ) -> oneshot::Receiver<Result<()>> {
5865 let id = self.id;
5866 self.send_job(
5867 Some(format!("git worktree add: {}", branch_name).into()),
5868 move |repo, _cx| async move {
5869 match repo {
5870 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5871 backend.create_worktree(branch_name, path, commit).await
5872 }
5873 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5874 client
5875 .request(proto::GitCreateWorktree {
5876 project_id: project_id.0,
5877 repository_id: id.to_proto(),
5878 name: branch_name,
5879 directory: path.to_string_lossy().to_string(),
5880 commit,
5881 })
5882 .await?;
5883
5884 Ok(())
5885 }
5886 }
5887 },
5888 )
5889 }
5890
5891 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5892 let id = self.id;
5893 self.send_job(
5894 Some(format!("git worktree remove: {}", path.display()).into()),
5895 move |repo, _cx| async move {
5896 match repo {
5897 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5898 backend.remove_worktree(path, force).await
5899 }
5900 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5901 client
5902 .request(proto::GitRemoveWorktree {
5903 project_id: project_id.0,
5904 repository_id: id.to_proto(),
5905 path: path.to_string_lossy().to_string(),
5906 force,
5907 })
5908 .await?;
5909
5910 Ok(())
5911 }
5912 }
5913 },
5914 )
5915 }
5916
5917 pub fn rename_worktree(
5918 &mut self,
5919 old_path: PathBuf,
5920 new_path: PathBuf,
5921 ) -> oneshot::Receiver<Result<()>> {
5922 let id = self.id;
5923 self.send_job(
5924 Some(format!("git worktree move: {}", old_path.display()).into()),
5925 move |repo, _cx| async move {
5926 match repo {
5927 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5928 backend.rename_worktree(old_path, new_path).await
5929 }
5930 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5931 client
5932 .request(proto::GitRenameWorktree {
5933 project_id: project_id.0,
5934 repository_id: id.to_proto(),
5935 old_path: old_path.to_string_lossy().to_string(),
5936 new_path: new_path.to_string_lossy().to_string(),
5937 })
5938 .await?;
5939
5940 Ok(())
5941 }
5942 }
5943 },
5944 )
5945 }
5946
5947 pub fn default_branch(
5948 &mut self,
5949 include_remote_name: bool,
5950 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5951 let id = self.id;
5952 self.send_job(None, move |repo, _| async move {
5953 match repo {
5954 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5955 backend.default_branch(include_remote_name).await
5956 }
5957 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5958 let response = client
5959 .request(proto::GetDefaultBranch {
5960 project_id: project_id.0,
5961 repository_id: id.to_proto(),
5962 })
5963 .await?;
5964
5965 anyhow::Ok(response.branch.map(SharedString::from))
5966 }
5967 }
5968 })
5969 }
5970
    /// Computes a tree-level diff (per-path statuses, no file contents) for
    /// the given diff type.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Decode each entry; entries with a missing/invalid oid or
                    // an invalid path are logged (`log_err`) and dropped
                    // rather than failing the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6030
6031 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6032 let id = self.id;
6033 self.send_job(None, move |repo, _cx| async move {
6034 match repo {
6035 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6036 backend.diff(diff_type).await
6037 }
6038 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6039 let (proto_diff_type, merge_base_ref) = match &diff_type {
6040 DiffType::HeadToIndex => {
6041 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6042 }
6043 DiffType::HeadToWorktree => {
6044 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6045 }
6046 DiffType::MergeBase { base_ref } => (
6047 proto::git_diff::DiffType::MergeBase.into(),
6048 Some(base_ref.to_string()),
6049 ),
6050 };
6051 let response = client
6052 .request(proto::GitDiff {
6053 project_id: project_id.0,
6054 repository_id: id.to_proto(),
6055 diff_type: proto_diff_type,
6056 merge_base_ref,
6057 })
6058 .await?;
6059
6060 Ok(response.diff)
6061 }
6062 }
6063 })
6064 }
6065
6066 pub fn create_branch(
6067 &mut self,
6068 branch_name: String,
6069 base_branch: Option<String>,
6070 ) -> oneshot::Receiver<Result<()>> {
6071 let id = self.id;
6072 let status_msg = if let Some(ref base) = base_branch {
6073 format!("git switch -c {branch_name} {base}").into()
6074 } else {
6075 format!("git switch -c {branch_name}").into()
6076 };
6077 self.send_job(Some(status_msg), move |repo, _cx| async move {
6078 match repo {
6079 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6080 backend.create_branch(branch_name, base_branch).await
6081 }
6082 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6083 client
6084 .request(proto::GitCreateBranch {
6085 project_id: project_id.0,
6086 repository_id: id.to_proto(),
6087 branch_name,
6088 })
6089 .await?;
6090
6091 Ok(())
6092 }
6093 }
6094 })
6095 }
6096
6097 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6098 let id = self.id;
6099 self.send_job(
6100 Some(format!("git switch {branch_name}").into()),
6101 move |repo, _cx| async move {
6102 match repo {
6103 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6104 backend.change_branch(branch_name).await
6105 }
6106 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6107 client
6108 .request(proto::GitChangeBranch {
6109 project_id: project_id.0,
6110 repository_id: id.to_proto(),
6111 branch_name,
6112 })
6113 .await?;
6114
6115 Ok(())
6116 }
6117 }
6118 },
6119 )
6120 }
6121
6122 pub fn delete_branch(
6123 &mut self,
6124 is_remote: bool,
6125 branch_name: String,
6126 ) -> oneshot::Receiver<Result<()>> {
6127 let id = self.id;
6128 self.send_job(
6129 Some(
6130 format!(
6131 "git branch {} {}",
6132 if is_remote { "-dr" } else { "-d" },
6133 branch_name
6134 )
6135 .into(),
6136 ),
6137 move |repo, _cx| async move {
6138 match repo {
6139 RepositoryState::Local(state) => {
6140 state.backend.delete_branch(is_remote, branch_name).await
6141 }
6142 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6143 client
6144 .request(proto::GitDeleteBranch {
6145 project_id: project_id.0,
6146 repository_id: id.to_proto(),
6147 is_remote,
6148 branch_name,
6149 })
6150 .await?;
6151
6152 Ok(())
6153 }
6154 }
6155 },
6156 )
6157 }
6158
6159 pub fn rename_branch(
6160 &mut self,
6161 branch: String,
6162 new_name: String,
6163 ) -> oneshot::Receiver<Result<()>> {
6164 let id = self.id;
6165 self.send_job(
6166 Some(format!("git branch -m {branch} {new_name}").into()),
6167 move |repo, _cx| async move {
6168 match repo {
6169 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6170 backend.rename_branch(branch, new_name).await
6171 }
6172 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6173 client
6174 .request(proto::GitRenameBranch {
6175 project_id: project_id.0,
6176 repository_id: id.to_proto(),
6177 branch,
6178 new_name,
6179 })
6180 .await?;
6181
6182 Ok(())
6183 }
6184 }
6185 },
6186 )
6187 }
6188
6189 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6190 let id = self.id;
6191 self.send_job(None, move |repo, _cx| async move {
6192 match repo {
6193 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6194 backend.check_for_pushed_commit().await
6195 }
6196 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6197 let response = client
6198 .request(proto::CheckForPushedCommits {
6199 project_id: project_id.0,
6200 repository_id: id.to_proto(),
6201 })
6202 .await?;
6203
6204 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6205
6206 Ok(branches)
6207 }
6208 }
6209 })
6210 }
6211
6212 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6213 self.send_job(None, |repo, _cx| async move {
6214 match repo {
6215 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6216 backend.checkpoint().await
6217 }
6218 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6219 }
6220 })
6221 }
6222
6223 pub fn restore_checkpoint(
6224 &mut self,
6225 checkpoint: GitRepositoryCheckpoint,
6226 ) -> oneshot::Receiver<Result<()>> {
6227 self.send_job(None, move |repo, _cx| async move {
6228 match repo {
6229 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6230 backend.restore_checkpoint(checkpoint).await
6231 }
6232 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6233 }
6234 })
6235 }
6236
    /// Applies a repository state update received from the upstream (host)
    /// project, mutating the local snapshot and emitting a change event for
    /// each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch and head commit: emit BranchChanged if either differs.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries: ones that fail to decode are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        // Linked worktrees.
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Apply removed + updated file statuses as one batch edit; entries
        // that fail to decode are logged (`log_err`) and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only the final message of an update burst carries the new scan id.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6320
6321 pub fn compare_checkpoints(
6322 &mut self,
6323 left: GitRepositoryCheckpoint,
6324 right: GitRepositoryCheckpoint,
6325 ) -> oneshot::Receiver<Result<bool>> {
6326 self.send_job(None, move |repo, _cx| async move {
6327 match repo {
6328 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6329 backend.compare_checkpoints(left, right).await
6330 }
6331 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6332 }
6333 })
6334 }
6335
6336 pub fn diff_checkpoints(
6337 &mut self,
6338 base_checkpoint: GitRepositoryCheckpoint,
6339 target_checkpoint: GitRepositoryCheckpoint,
6340 ) -> oneshot::Receiver<Result<String>> {
6341 self.send_job(None, move |repo, _cx| async move {
6342 match repo {
6343 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6344 backend
6345 .diff_checkpoints(base_checkpoint, target_checkpoint)
6346 .await
6347 }
6348 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6349 }
6350 })
6351 }
6352
    /// Prunes pending ops that are no longer running, keeping only in-flight
    /// ones, and emits `PendingOpsChanged` if anything was removed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree keeping, per path, only the ops still running;
        // paths with no running ops are dropped entirely.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *previous* (pre-prune) set
            // of pending ops, not `updated` — confirm this is intentional.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6378
    /// Queues a full git state rescan. The job is keyed
    /// (`GitJobKey::ReloadGitState`) so a queued-but-not-started scan is
    /// coalesced with a newer one; the fresh snapshot is forwarded downstream
    /// when `updates_tx` is provided.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // Nothing to do if the repository entity is already gone.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans can only be performed against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // NOTE(review): the result of this update is discarded — an
                // error (entity dropped mid-await) is silently ignored here.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6410
    /// Spawns the background worker that executes git jobs for a local
    /// repository, returning the channel used to enqueue jobs.
    ///
    /// Jobs run one at a time, in order. A job that carries a key is skipped
    /// when a newer job with the same key is already queued, so only the most
    /// recent keyed job (e.g. an index write for a given path) actually runs.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything already queued so the keyed-job check
                // below can see the most recent entries.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job when a newer one with the same key is
                    // still queued (last-write-wins for keyed jobs).
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped: shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6456
    /// Spawns the background worker that executes git jobs for a remote
    /// repository, returning the channel used to enqueue jobs.
    ///
    /// Same scheduling as the local worker: jobs run one at a time, in
    /// order, and a keyed job is skipped when a newer job with the same key
    /// is already queued.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything already queued so the keyed-job check
                // below can see the most recent entries.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job when a newer one with the same key is
                    // still queued (last-write-wins for keyed jobs).
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped: shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6492
    /// Loads the index (staged) text for `repo_path`.
    ///
    /// Locally this reads the file's text from the git index via the
    /// backend; remotely it asks the host with an `OpenUnstagedDiff` request
    /// and returns the response's `staged_text`. The `Option` is `None` when
    /// the backend reports no index text for the path (presumably an
    /// untracked or deleted file — confirm against `load_index_text`).
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        // Flatten the job's oneshot receiver into the returned task.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6517
    /// Loads both diff bases (HEAD and index) for `repo_path` in one job.
    ///
    /// Locally, when the committed text equals the staged text, the result is
    /// collapsed into [`DiffBasesChange::SetBoth`] so both diff bases share a
    /// single string; otherwise each base is carried separately. Remotely,
    /// the host performs the equivalent comparison and reports it via the
    /// response's `mode`.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // Collapse identical bases so downstream consumers can
                    // treat index and HEAD as one.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        // Flatten the job's oneshot receiver into the returned task.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6563
    /// Loads the content of the blob identified by `oid`, reading it from
    /// the local git backend or, for remote repositories, requesting it from
    /// the host via `GetBlobContent`.
    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
        // Captured up front: the job closure cannot borrow `self`.
        let repository_id = self.snapshot.id;
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_blob_content(oid).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetBlobContent {
                            project_id: project_id.to_proto(),
                            repository_id: repository_id.0,
                            oid: oid.to_string(),
                        })
                        .await?;
                    Ok(response.content)
                }
            }
        });
        // Flatten the job's oneshot receiver into the returned task.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6585
    /// Records that `paths` changed on disk and schedules an incremental
    /// status refresh for them.
    ///
    /// Paths accumulate in `paths_needing_status_update`; the refresh runs as
    /// a keyed job (`GitJobKey::RefreshStatuses`), so rapid successive calls
    /// coalesce into a single rescan covering all accumulated paths. Only
    /// valid for local repositories. When statuses or stash entries actually
    /// change, the matching [`RepositoryEvent`]s are emitted and the updated
    /// snapshot is forwarded downstream via `updates_tx`, if provided.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take a consistent snapshot plus every path batch queued
                // since the last refresh (possibly several calls' worth).
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Deduplicate and order the accumulated batches.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        // Without a HEAD commit there is nothing to diff
                        // against, so substitute an empty diff stat.
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Insert entries whose status or diff stat differs
                        // from the previous snapshot; skip unchanged ones.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths git no longer reports a status for are
                        // removed from the tree (if previously present).
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6698
6699 /// currently running git command and when it started
6700 pub fn current_job(&self) -> Option<JobInfo> {
6701 self.active_jobs.values().next().cloned()
6702 }
6703
6704 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
6705 self.send_job(None, |_, _| async {})
6706 }
6707
    /// Runs `f` while tracking it as a pending git operation on `paths`.
    ///
    /// A [`PendingOp`] with status `Running` is recorded for every path
    /// before the job starts; once `f` resolves, each op is marked
    /// `Finished`, `Skipped` (when the job was canceled), or `Error`.
    /// Cancellation is surfaced to the caller as success.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                // A canceled job is not an error for the caller.
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Write the final status back into each path's pending-op list.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6747
6748 fn new_pending_ops_for_paths(
6749 &mut self,
6750 paths: Vec<RepoPath>,
6751 git_status: pending_op::GitStatus,
6752 ) -> Vec<(PendingOpId, RepoPath)> {
6753 let mut edits = Vec::with_capacity(paths.len());
6754 let mut ids = Vec::with_capacity(paths.len());
6755 for path in paths {
6756 let mut ops = self
6757 .pending_ops
6758 .get(&PathKey(path.as_ref().clone()), ())
6759 .cloned()
6760 .unwrap_or_else(|| PendingOps::new(&path));
6761 let id = ops.max_id() + 1;
6762 ops.ops.push(PendingOp {
6763 id,
6764 git_status,
6765 job_status: pending_op::JobStatus::Running,
6766 });
6767 edits.push(sum_tree::Edit::Insert(ops));
6768 ids.push((id, path));
6769 }
6770 self.pending_ops.edit(edits, ());
6771 ids
6772 }
6773 pub fn default_remote_url(&self) -> Option<String> {
6774 self.remote_upstream_url
6775 .clone()
6776 .or(self.remote_origin_url.clone())
6777 }
6778}
6779
6780/// If `path` is a git linked worktree checkout, resolves it to the main
6781/// repository's working directory path. Returns `None` if `path` is a normal
6782/// repository, not a git repo, or if resolution fails.
6783///
6784/// Resolution works by:
6785/// 1. Reading the `.git` file to get the `gitdir:` pointer
6786/// 2. Following that to the worktree-specific git directory
6787/// 3. Reading the `commondir` file to find the shared `.git` directory
6788/// 4. Deriving the main repo's working directory from the common dir
6789pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
6790 let dot_git = path.join(".git");
6791 let metadata = fs.metadata(&dot_git).await.ok()??;
6792 if metadata.is_dir {
6793 return None; // Normal repo, not a linked worktree
6794 }
6795 // It's a .git file — parse the gitdir: pointer
6796 let content = fs.load(&dot_git).await.ok()?;
6797 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
6798 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
6799 // Read commondir to find the main .git directory
6800 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
6801 let common_dir = fs
6802 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
6803 .await
6804 .ok()?;
6805 Some(git::repository::original_repo_path_from_common_dir(
6806 &common_dir,
6807 ))
6808}
6809
6810/// Validates that the resolved worktree directory is acceptable:
6811/// - The setting must not be an absolute path.
6812/// - The resolved path must be either a subdirectory of the working
6813/// directory or a subdirectory of its parent (i.e., a sibling).
6814///
6815/// Returns `Ok(resolved_path)` or an error with a user-facing message.
6816pub fn worktrees_directory_for_repo(
6817 original_repo_abs_path: &Path,
6818 worktree_directory_setting: &str,
6819) -> Result<PathBuf> {
6820 // Check the original setting before trimming, since a path like "///"
6821 // is absolute but becomes "" after stripping trailing separators.
6822 // Also check for leading `/` or `\` explicitly, because on Windows
6823 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
6824 // would slip through even though it's clearly not a relative path.
6825 if Path::new(worktree_directory_setting).is_absolute()
6826 || worktree_directory_setting.starts_with('/')
6827 || worktree_directory_setting.starts_with('\\')
6828 {
6829 anyhow::bail!(
6830 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
6831 );
6832 }
6833
6834 if worktree_directory_setting.is_empty() {
6835 anyhow::bail!("git.worktree_directory must not be empty");
6836 }
6837
6838 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
6839 if trimmed == ".." {
6840 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
6841 }
6842
6843 let joined = original_repo_abs_path.join(trimmed);
6844 let resolved = util::normalize_path(&joined);
6845 let resolved = if resolved.starts_with(original_repo_abs_path) {
6846 resolved
6847 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
6848 resolved.join(repo_dir_name)
6849 } else {
6850 resolved
6851 };
6852
6853 let parent = original_repo_abs_path
6854 .parent()
6855 .unwrap_or(original_repo_abs_path);
6856
6857 if !resolved.starts_with(parent) {
6858 anyhow::bail!(
6859 "git.worktree_directory resolved to {resolved:?}, which is outside \
6860 the project root and its parent directory. It must resolve to a \
6861 subdirectory of {original_repo_abs_path:?} or a sibling of it."
6862 );
6863 }
6864
6865 Ok(resolved)
6866}
6867
6868/// Returns a short name for a linked worktree suitable for UI display
6869///
6870/// Uses the main worktree path to come up with a short name that disambiguates
6871/// the linked worktree from the main worktree.
6872pub fn linked_worktree_short_name(
6873 main_worktree_path: &Path,
6874 linked_worktree_path: &Path,
6875) -> Option<SharedString> {
6876 if main_worktree_path == linked_worktree_path {
6877 return None;
6878 }
6879
6880 let project_name = main_worktree_path.file_name()?.to_str()?;
6881 let directory_name = linked_worktree_path.file_name()?.to_str()?;
6882 let name = if directory_name != project_name {
6883 directory_name.to_string()
6884 } else {
6885 linked_worktree_path
6886 .parent()?
6887 .file_name()?
6888 .to_str()?
6889 .to_string()
6890 };
6891 Some(name.into())
6892}
6893
/// Builds a permalink for a file that lives inside a vendored crate in the
/// Rust registry source cache.
///
/// Walks up from `path` to find the crate root via its `.cargo_vcs_info.json`
/// marker (which records the source repository's commit sha and the crate's
/// path within that repository), reads `package.repository` from the crate's
/// `Cargo.toml`, and asks the matching git hosting provider to build a
/// permalink to `selection` within the original repository.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal shapes of the two files read below; extra fields are ignored.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // `skip(1)` skips `path` itself; the marker file lives in an ancestor.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file path from the registry cache into the source repo.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
6944
6945fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6946 let Some(blame) = blame else {
6947 return proto::BlameBufferResponse {
6948 blame_response: None,
6949 };
6950 };
6951
6952 let entries = blame
6953 .entries
6954 .into_iter()
6955 .map(|entry| proto::BlameEntry {
6956 sha: entry.sha.as_bytes().into(),
6957 start_line: entry.range.start,
6958 end_line: entry.range.end,
6959 original_line_number: entry.original_line_number,
6960 author: entry.author,
6961 author_mail: entry.author_mail,
6962 author_time: entry.author_time,
6963 author_tz: entry.author_tz,
6964 committer: entry.committer_name,
6965 committer_mail: entry.committer_email,
6966 committer_time: entry.committer_time,
6967 committer_tz: entry.committer_tz,
6968 summary: entry.summary,
6969 previous: entry.previous,
6970 filename: entry.filename,
6971 })
6972 .collect::<Vec<_>>();
6973
6974 let messages = blame
6975 .messages
6976 .into_iter()
6977 .map(|(oid, message)| proto::CommitMessage {
6978 oid: oid.as_bytes().into(),
6979 message,
6980 })
6981 .collect::<Vec<_>>();
6982
6983 proto::BlameBufferResponse {
6984 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6985 }
6986}
6987
6988fn deserialize_blame_buffer_response(
6989 response: proto::BlameBufferResponse,
6990) -> Option<git::blame::Blame> {
6991 let response = response.blame_response?;
6992 let entries = response
6993 .entries
6994 .into_iter()
6995 .filter_map(|entry| {
6996 Some(git::blame::BlameEntry {
6997 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6998 range: entry.start_line..entry.end_line,
6999 original_line_number: entry.original_line_number,
7000 committer_name: entry.committer,
7001 committer_time: entry.committer_time,
7002 committer_tz: entry.committer_tz,
7003 committer_email: entry.committer_mail,
7004 author: entry.author,
7005 author_mail: entry.author_mail,
7006 author_time: entry.author_time,
7007 author_tz: entry.author_tz,
7008 summary: entry.summary,
7009 previous: entry.previous,
7010 filename: entry.filename,
7011 })
7012 })
7013 .collect::<Vec<_>>();
7014
7015 let messages = response
7016 .messages
7017 .into_iter()
7018 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7019 .collect::<HashMap<_, _>>();
7020
7021 Some(Blame { entries, messages })
7022}
7023
7024fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7025 proto::Branch {
7026 is_head: branch.is_head,
7027 ref_name: branch.ref_name.to_string(),
7028 unix_timestamp: branch
7029 .most_recent_commit
7030 .as_ref()
7031 .map(|commit| commit.commit_timestamp as u64),
7032 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7033 ref_name: upstream.ref_name.to_string(),
7034 tracking: upstream
7035 .tracking
7036 .status()
7037 .map(|upstream| proto::UpstreamTracking {
7038 ahead: upstream.ahead as u64,
7039 behind: upstream.behind as u64,
7040 }),
7041 }),
7042 most_recent_commit: branch
7043 .most_recent_commit
7044 .as_ref()
7045 .map(|commit| proto::CommitSummary {
7046 sha: commit.sha.to_string(),
7047 subject: commit.subject.to_string(),
7048 commit_timestamp: commit.commit_timestamp,
7049 author_name: commit.author_name.to_string(),
7050 }),
7051 }
7052}
7053
7054fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7055 proto::Worktree {
7056 path: worktree.path.to_string_lossy().to_string(),
7057 ref_name: worktree
7058 .ref_name
7059 .as_ref()
7060 .map(|s| s.to_string())
7061 .unwrap_or_default(),
7062 sha: worktree.sha.to_string(),
7063 is_main: worktree.is_main,
7064 }
7065}
7066
7067fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7068 git::repository::Worktree {
7069 path: PathBuf::from(proto.path.clone()),
7070 ref_name: Some(SharedString::from(&proto.ref_name)),
7071 sha: proto.sha.clone().into(),
7072 is_main: proto.is_main,
7073 }
7074}
7075
7076fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7077 git::repository::Branch {
7078 is_head: proto.is_head,
7079 ref_name: proto.ref_name.clone().into(),
7080 upstream: proto
7081 .upstream
7082 .as_ref()
7083 .map(|upstream| git::repository::Upstream {
7084 ref_name: upstream.ref_name.to_string().into(),
7085 tracking: upstream
7086 .tracking
7087 .as_ref()
7088 .map(|tracking| {
7089 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7090 ahead: tracking.ahead as u32,
7091 behind: tracking.behind as u32,
7092 })
7093 })
7094 .unwrap_or(git::repository::UpstreamTracking::Gone),
7095 }),
7096 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7097 git::repository::CommitSummary {
7098 sha: commit.sha.to_string().into(),
7099 subject: commit.subject.to_string().into(),
7100 commit_timestamp: commit.commit_timestamp,
7101 author_name: commit.author_name.to_string().into(),
7102 has_parent: true,
7103 }
7104 }),
7105 }
7106}
7107
7108fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7109 proto::GitCommitDetails {
7110 sha: commit.sha.to_string(),
7111 message: commit.message.to_string(),
7112 commit_timestamp: commit.commit_timestamp,
7113 author_email: commit.author_email.to_string(),
7114 author_name: commit.author_name.to_string(),
7115 }
7116}
7117
7118fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7119 CommitDetails {
7120 sha: proto.sha.clone().into(),
7121 message: proto.message.clone().into(),
7122 commit_timestamp: proto.commit_timestamp,
7123 author_email: proto.author_email.clone().into(),
7124 author_name: proto.author_name.clone().into(),
7125 }
7126}
7127
/// Recomputes the full repository snapshot.
///
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Any paths queued for incremental refresh are superseded by this full
    // rescan, so drop them.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Phase 1: branch list, HEAD commit, and the list of git worktrees.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Keep only *linked* worktrees; the main working directory is tracked
    // separately on the snapshot.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // Publish the branch/remote/worktree portion of the snapshot right away
    // so the UI can react before the (slower) status scan completes.
    let snapshot = this.update(cx, |this, cx| {
        let branch_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if branch_changed {
            cx.emit(RepositoryEvent::BranchChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Phase 2: full file statuses, diff stats, and stash entries.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                // With no HEAD commit there is nothing to diff against, so
                // substitute an empty diff stat.
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Build the status tree, collecting conflicted paths along the way.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Phase 3: merge/conflict state, derived from the conflicted paths.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Commit the file-state portion and emit change events.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7290
/// Deserializes a file status from the wire format.
///
/// When the structured `status` variant is present it takes precedence;
/// otherwise the legacy `simple_status` integer code is decoded. Errors on
/// any code that is invalid for the position it appears in.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Legacy path: no structured variant, only the simple status code.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        // Decode both heads of an unmerged (conflicted) entry.
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        // Decode the index and worktree codes of a tracked entry.
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7376
7377fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7378 use proto::git_file_status::{Tracked, Unmerged, Variant};
7379
7380 let variant = match status {
7381 FileStatus::Untracked => Variant::Untracked(Default::default()),
7382 FileStatus::Ignored => Variant::Ignored(Default::default()),
7383 FileStatus::Unmerged(UnmergedStatus {
7384 first_head,
7385 second_head,
7386 }) => Variant::Unmerged(Unmerged {
7387 first_head: unmerged_status_to_proto(first_head),
7388 second_head: unmerged_status_to_proto(second_head),
7389 }),
7390 FileStatus::Tracked(TrackedStatus {
7391 index_status,
7392 worktree_status,
7393 }) => Variant::Tracked(Tracked {
7394 index_status: tracked_status_to_proto(index_status),
7395 worktree_status: tracked_status_to_proto(worktree_status),
7396 }),
7397 };
7398 proto::GitFileStatus {
7399 variant: Some(variant),
7400 }
7401}
7402
7403fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7404 match code {
7405 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7406 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7407 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7408 }
7409}
7410
7411fn tracked_status_to_proto(code: StatusCode) -> i32 {
7412 match code {
7413 StatusCode::Added => proto::GitStatus::Added as _,
7414 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7415 StatusCode::Modified => proto::GitStatus::Modified as _,
7416 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7417 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7418 StatusCode::Copied => proto::GitStatus::Copied as _,
7419 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7420 }
7421}