1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for all git state in a project: tracks every discovered
/// repository, per-buffer diff state, and the active repository selection,
/// for both local and remote (collaboration) projects.
pub struct GitStore {
    // Local vs. remote mode plus any downstream (shared) connection.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Which worktrees each repository spans.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    active_repo_id: Option<RepositoryId>,
    #[allow(clippy::type_complexity)]
    // In-flight diff loads, keyed by buffer and diff kind, so concurrent
    // requests share one task.
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (diffs, conflict sets, base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diffs we have shared with each remote peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Diff entities kept alive on behalf of a remote peer for one buffer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git bookkeeping: weak handles to the buffer's diffs and
/// conflict set, cached base texts, and the tasks that keep them current.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against arbitrary commits; `None` key is used for the
    // no-base-commit case (see `DiffKind::SinceOid`).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Completion signals for in-flight conflict reparses.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    // Cached base texts for oid diffs, keyed by commit.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed and their
/// new contents; `None` means the file is absent from that base.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    SetIndex(Option<String>),
    SetHead(Option<String>),
    /// Index and HEAD changed to different contents.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same contents.
    SetBoth(Option<String>),
}
155
/// Identifies which diff of a buffer is being loaded or cached.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Working copy vs. index.
    Unstaged,
    /// Working copy vs. HEAD.
    Uncommitted,
    /// Working copy vs. an arbitrary commit (or no base when `None`).
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store owns the repositories (local) or mirrors a remote
/// project's repositories over RPC.
enum GitStoreState {
    Local {
        // Source of fresh RepositoryIds for newly discovered repos.
        next_repository_id: Arc<AtomicU64>,
        // Present while this project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        // Present while re-sharing this remote project downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// Message sent to the background task that forwards repository state to
/// downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
181
/// Connection state for a shared local project: the client to send to, and
/// the channel/task pair that serializes repository updates to it.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// A checkpoint of every repository in the store, keyed by the repository's
/// working-directory absolute path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// Git status for a single path within a repository, plus optional
/// added/deleted line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
// Allows status entries to live in a `SumTree` ordered by path, with a
// rolled-up `GitSummary` per subtree.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}
254
// Status entries are keyed by their repo-relative path for tree edits.
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Store-wide identifier for a repository (allocated locally, mirrored for
/// remote projects).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
265
/// State of an in-progress merge: which merge heads conflict on each path,
/// and the pending merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}
271
/// Lazily fetched commit data for the git graph: either still being
/// loaded, or available.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// Immutable snapshot of a repository's observable state: statuses, branch,
/// HEAD, merge/stash details, remotes, and linked git worktrees. Cheap to
/// clone and diffed to produce downstream updates.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    // Incremented on each status scan; used to order updates.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    pub linked_worktrees: Arc<[GitWorktree]>,
}
298
/// Identifier for a job tracked in `Repository::active_jobs`.
type JobId = u64;
300
/// Metadata about a running git job, for progress/status display.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
306
/// Background task plus the channel used to request commit data for the
/// git graph, one commit oid at a time.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}
311
/// Lifecycle of the graph commit-data handler task.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
317
/// Incrementally fetched commit list for one git-graph view, along with an
/// oid → index lookup and any error encountered while loading.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
324
/// Borrowed view of the current graph data handed to callers: the commits
/// loaded so far, whether loading continues, and any load error.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// A single git repository tracked by the `GitStore`: the latest snapshot,
/// the job queue that serializes git operations, and cached graph/commit
/// data. Derefs to its `RepositorySnapshot`.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Git operations are funneled through this queue and run in order.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Counter used to allocate ids for `active_jobs`.
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily resolved local/remote backing state, shared across jobs.
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
350
// Expose the snapshot's fields/methods directly on `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
358
/// Backing state for a repository on the local machine: filesystem handle,
/// git backend, and the environment used when shelling out to git.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    pub environment: Arc<HashMap<String, String>>,
}
365
impl LocalRepositoryState {
    /// Opens the git repository at `dot_git_abs_path` and assembles the
    /// local backing state for it.
    ///
    /// Resolves the working directory's shell environment (falling back to
    /// an empty environment, with an error log, when that fails), locates a
    /// system `git` binary using that environment's `PATH`, opens the repo
    /// on a background thread, and marks the backend trusted or not.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a git found via the directory environment's PATH,
                    // then fall back to whatever is on the process PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
408
/// Backing state for a repository that lives on a remote host, reached via
/// the upstream RPC client.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
414
/// Where a repository's git operations actually execute: locally against
/// the filesystem, or remotely over RPC.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
420
/// Progress events for loading the git graph's commit list.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
427
/// Events emitted by a `Repository` entity when parts of its state change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
437
/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
440
/// Events emitted by the `GitStore` entity.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
452
// Marker impls wiring up entity event channels: `Repository` emits both
// `RepositoryEvent` and `JobsUpdated`; `GitStore` emits `GitStoreEvent`.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
456
/// A unit of work queued on a repository: a closure run with the resolved
/// repository state, plus an optional key used to coalesce duplicate jobs.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    key: Option<GitJobKey>,
}
461
/// Keys identifying coalescable job kinds; queued jobs with an equal key
/// can supersede one another.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
471 pub fn local(
472 worktree_store: &Entity<WorktreeStore>,
473 buffer_store: Entity<BufferStore>,
474 environment: Entity<ProjectEnvironment>,
475 fs: Arc<dyn Fs>,
476 cx: &mut Context<Self>,
477 ) -> Self {
478 Self::new(
479 worktree_store.clone(),
480 buffer_store,
481 GitStoreState::Local {
482 next_repository_id: Arc::new(AtomicU64::new(1)),
483 downstream: None,
484 project_environment: environment,
485 fs,
486 },
487 cx,
488 )
489 }
490
491 pub fn remote(
492 worktree_store: &Entity<WorktreeStore>,
493 buffer_store: Entity<BufferStore>,
494 upstream_client: AnyProtoClient,
495 project_id: u64,
496 cx: &mut Context<Self>,
497 ) -> Self {
498 Self::new(
499 worktree_store.clone(),
500 buffer_store,
501 GitStoreState::Remote {
502 upstream_client,
503 upstream_project_id: project_id,
504 downstream: None,
505 },
506 cx,
507 )
508 }
509
510 fn new(
511 worktree_store: Entity<WorktreeStore>,
512 buffer_store: Entity<BufferStore>,
513 state: GitStoreState,
514 cx: &mut Context<Self>,
515 ) -> Self {
516 let mut _subscriptions = vec![
517 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
518 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
519 ];
520
521 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
522 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
523 }
524
525 GitStore {
526 state,
527 buffer_store,
528 worktree_store,
529 repositories: HashMap::default(),
530 worktree_ids: HashMap::default(),
531 active_repo_id: None,
532 _subscriptions,
533 loading_diffs: HashMap::default(),
534 shared_diffs: HashMap::default(),
535 diffs: HashMap::default(),
536 }
537 }
538
    /// Registers all git-related RPC handlers on the given client so that
    /// incoming requests/messages are routed to the matching `handle_*`
    /// methods of this entity.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
586
587 pub fn is_local(&self) -> bool {
588 matches!(self.state, GitStoreState::Local { .. })
589 }
590 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
591 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
592 let id = repo.read(cx).id;
593 if self.active_repo_id != Some(id) {
594 self.active_repo_id = Some(id);
595 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
596 }
597 }
598 }
599
    /// Begins sharing this project's git state with a downstream client.
    ///
    /// For a remote store, sends each repository's initial update directly
    /// and records the downstream connection. For a local store, spawns a
    /// background task that consumes `DownstreamUpdate`s from a channel,
    /// diffing each snapshot against the last one sent so only incremental
    /// updates go over the wire; the current snapshots are enqueued first.
    /// When the task ends (e.g. the client goes away), the downstream state
    /// is cleared.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    // Updates may exceed message size limits, so send in chunks.
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // Last snapshot sent per repository, used for diffing.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the channel with the current state of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repo: send only the delta.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repo: send the full state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The forwarding loop ended; tear down the share.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
680
681 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
682 match &mut self.state {
683 GitStoreState::Local {
684 downstream: downstream_client,
685 ..
686 } => {
687 downstream_client.take();
688 }
689 GitStoreState::Remote {
690 downstream: downstream_client,
691 ..
692 } => {
693 downstream_client.take();
694 }
695 }
696 self.shared_diffs.clear();
697 }
698
    /// Drops all diffs retained on behalf of the given peer (e.g. when the
    /// peer disconnects).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
702
703 pub fn active_repository(&self) -> Option<Entity<Repository>> {
704 self.active_repo_id
705 .as_ref()
706 .map(|id| self.repositories[id].clone())
707 }
708
    /// Returns the buffer's unstaged diff (working copy vs. index),
    /// creating it if needed.
    ///
    /// If a live diff already exists, it is returned once any in-progress
    /// recalculation finishes. Otherwise the index text is loaded from the
    /// buffer's repository and a shared loading task is stored in
    /// `loading_diffs` so concurrent callers await the same work.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // Existing diff: wait out any pending recalculation first.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads through a shared task.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
763
    /// Returns a diff of the buffer against an arbitrary commit `oid`
    /// (or against no base text when `oid` is `None`), creating it if
    /// needed.
    ///
    /// Reuses a live diff when present; otherwise loads the blob content
    /// from the repository, builds a new `BufferDiff` with the buffer's
    /// unstaged diff attached as its secondary diff, caches the base text,
    /// and registers the diff in this buffer's git state. Loading is
    /// deduplicated through `loading_diffs`.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            // Existing diff: wait out any pending recalculation first.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        let diff_kind = DiffKind::SinceOid(oid);
        // A load is already in flight; share it.
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // `None` oid means diffing against an empty base.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff acts as the secondary (staging) diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and register the new diff.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
864
    /// Returns the buffer's uncommitted diff (working copy vs. HEAD),
    /// creating it if needed.
    ///
    /// Mirrors `open_unstaged_diff`: reuses a live diff (after waiting for
    /// any in-progress recalculation), otherwise loads the committed text
    /// from the buffer's repository and shares one loading task between
    /// concurrent callers via `loading_diffs`.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
917
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`:
    /// builds the `BufferDiff` for the loaded base texts and registers it
    /// in the buffer's git state.
    ///
    /// On a load error, the `loading_diffs` entry is cleared and the error
    /// propagated. Otherwise the new diff is wired up (for an uncommitted
    /// diff, an unstaged diff is created if needed and attached as the
    /// secondary diff), the base-text change is applied, and the returned
    /// future resolves once the initial recalculation completes. Not used
    /// for `DiffKind::SinceOid` (see `open_diff_since`).
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Loading failed: drop the shared loading entry so a retry
                // can start fresh.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged
                        // diff as its secondary; create one if absent.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
992
993 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
994 let diff_state = self.diffs.get(&buffer_id)?;
995 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
996 }
997
998 pub fn get_uncommitted_diff(
999 &self,
1000 buffer_id: BufferId,
1001 cx: &App,
1002 ) -> Option<Entity<BufferDiff>> {
1003 let diff_state = self.diffs.get(&buffer_id)?;
1004 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1005 }
1006
1007 pub fn get_diff_since_oid(
1008 &self,
1009 buffer_id: BufferId,
1010 oid: Option<git::Oid>,
1011 cx: &App,
1012 ) -> Option<Entity<BufferDiff>> {
1013 let diff_state = self.diffs.get(&buffer_id)?;
1014 diff_state.read(cx).oid_diff(oid)
1015 }
1016
    /// Returns the buffer's conflict set, creating it if needed.
    ///
    /// An existing live conflict set is reparsed against the buffer's
    /// current text and returned. Otherwise a new `ConflictSet` is created
    /// (seeded with whether the path is unmerged in its repository), this
    /// store subscribes to it so conflict changes surface as
    /// `GitStoreEvent::ConflictsUpdated`, and an initial marker parse is
    /// kicked off.
    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(git_state) = self.diffs.get(&buffer_id)
            && let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            let conflict_set = conflict_set;
            let buffer_snapshot = buffer.read(cx).text_snapshot();

            // Refresh markers in case the buffer changed since last parse.
            git_state.update(cx, |state, cx| {
                let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
            });

            return conflict_set;
        }

        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }
1065
1066 pub fn project_path_git_status(
1067 &self,
1068 project_path: &ProjectPath,
1069 cx: &App,
1070 ) -> Option<FileStatus> {
1071 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1072 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1073 }
1074
    /// Captures a checkpoint of every repository in the store.
    ///
    /// Work-directory paths are collected synchronously; the per-repository
    /// checkpoint operations are then awaited together on the background
    /// executor. Any single failure fails the whole task.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `checkpoint()` appears to yield a nested result (a delivery
                // error wrapped around the repository result); the `?` in the
                // closure flattens it into a single `Result` for
                // `try_join_all` below.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            // Fail fast if any repository failed to checkpoint.
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Paths and checkpoint futures were pushed in lockstep above,
                // so zipping restores the per-repository association.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1095
    /// Restores repositories to a previously captured [`GitStoreCheckpoint`].
    ///
    /// Checkpoint entries are matched to current repositories by work
    /// directory path; entries whose repository no longer exists are silently
    /// skipped. All restores run concurrently and any failure fails the task.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index the current repositories by work directory for O(1) matching.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // `?` flattens the nested result produced by awaiting the
                // restore operation.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1121
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Entries are paired by work directory path. If `left` has an entry with
    /// no counterpart in `right`, the checkpoints are immediately unequal.
    /// Entries only in `right`, or whose repository no longer exists, do not
    /// affect the result.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    tasks.push(async move { compare.await? });
                }
            } else {
                // `left` captured a repository state that `right` lacks.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            // Equal only if every per-repository comparison reports equality.
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1160
    /// Blames a buffer.
    ///
    /// With `version`, blames the buffer content as of that version;
    /// otherwise the current content. Local repositories run the blame
    /// through the git backend; remote ones proxy the request upstream.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the text to blame before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle so the spawned task doesn't keep the
        // repository alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Remote projects delegate the blame computation upstream.
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1207
1208 pub fn file_history(
1209 &self,
1210 repo: &Entity<Repository>,
1211 path: RepoPath,
1212 cx: &mut App,
1213 ) -> Task<Result<git::repository::FileHistory>> {
1214 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1215
1216 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1217 }
1218
1219 pub fn file_history_paginated(
1220 &self,
1221 repo: &Entity<Repository>,
1222 path: RepoPath,
1223 skip: usize,
1224 limit: Option<usize>,
1225 cx: &mut App,
1226 ) -> Task<Result<git::repository::FileHistory>> {
1227 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1228
1229 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1230 }
1231
    /// Builds a permalink URL to the selected lines of `buffer` on its git
    /// hosting provider.
    ///
    /// Falls back to a Cargo-registry-based permalink for Rust files outside
    /// any repository; remote projects proxy the request upstream.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        // `remote` is shadowed here: now the parsed remote
                        // descriptor rather than the remote's name.
                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Remote projects delegate permalink construction upstream.
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1316
1317 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1318 match &self.state {
1319 GitStoreState::Local {
1320 downstream: downstream_client,
1321 ..
1322 } => downstream_client
1323 .as_ref()
1324 .map(|state| (state.client.clone(), state.project_id)),
1325 GitStoreState::Remote {
1326 downstream: downstream_client,
1327 ..
1328 } => downstream_client.clone(),
1329 }
1330 }
1331
1332 fn upstream_client(&self) -> Option<AnyProtoClient> {
1333 match &self.state {
1334 GitStoreState::Local { .. } => None,
1335 GitStoreState::Remote {
1336 upstream_client, ..
1337 } => Some(upstream_client.clone()),
1338 }
1339 }
1340
    /// Reacts to worktree-store events by keeping the repository set in sync
    /// (local projects only; remote stores return early).
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        // Only local stores derive repository state from worktrees.
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Resolve (asynchronously) which repository each updated
                    // entry belongs to, then notify each repository of its
                    // changed paths.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible (e.g. internal) worktrees don't contribute repos.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Detach the removed worktree from every repository; collect
                // the repositories that lost their last worktree.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream collaborators about the removal too.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository went away, pick an arbitrary
                // remaining one (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Responds to a repository change: refreshes conflict tracking for every
    /// open buffer belonging to that repository, then re-emits the event at
    /// the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only touch buffers whose repository is the one that changed.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // `conflict_set` is a weak handle, so this `update`
                        // can fail if it was dropped; the `?` plus the
                        // trailing `.ok()` below just skip that buffer.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Reparse markers only when the conflicted-ness of the
                        // path actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1486
    /// Re-broadcasts a repository's `JobsUpdated` notification as a
    /// store-level event so observers of the store can react to job changes.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1490
    /// Updates our list of repositories and schedules git scans in response
    /// to a notification from a worktree.
    ///
    /// For each reported change: a repository whose work directory moved is
    /// updated and rescanned; one whose work directory disappeared is
    /// detached from this worktree (and removed entirely once no worktree
    /// references it); a previously unknown repository is created, subscribed
    /// to, and scanned.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update to an existing repository by either its old or
            // its new work directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repository still exists (possibly moved): associate
                    // it with this worktree and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // The work directory is gone from this worktree: drop the
                    // association, and queue full removal if orphaned.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(_repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A previously unknown repository with complete path
                // information: create and register it.
                let original_repo_abs_path: Arc<Path> =
                    git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Worktree trust gates what the git backend may do.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository ever added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        // Tear down repositories that lost their last worktree.
        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1599
    /// Propagates worktree trust changes to the git backends of affected
    /// repositories (local stores only).
    fn on_trusted_worktrees_event(
        &mut self,
        _: Entity<TrustedWorktreesStore>,
        event: &TrustedWorktreesEvent,
        cx: &mut Context<Self>,
    ) {
        if !matches!(self.state, GitStoreState::Local { .. }) {
            return;
        }

        let (is_trusted, event_paths) = match event {
            TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
            TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
        };

        for (repo_id, worktree_ids) in &self.worktree_ids {
            // A repository is affected when any of its worktrees appears in
            // the event's path set.
            if worktree_ids
                .iter()
                .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
            {
                if let Some(repo) = self.repositories.get(repo_id) {
                    let repository_state = repo.read(cx).repository_state.clone();
                    // Resolve the (possibly still pending) repository state in
                    // the background; only local backends carry trust state.
                    cx.background_spawn(async move {
                        if let Ok(RepositoryState::Local(state)) = repository_state.await {
                            state.backend.set_trusted(is_trusted);
                        }
                    })
                    .detach();
                }
            }
        }
    }
1632
    /// Handles buffer lifecycle events from the [`BufferStore`], keeping the
    /// per-buffer git diff state in sync.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch every new buffer for language changes, which affect
                // its diff state.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop all diff state for the buffer, local and shared.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Best effort: failures to reload the committed text
                        // are logged and swallowed.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1705
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all of the
    /// resulting work has completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            // Buffers without tracked diff state are skipped entirely.
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` returns `None` when there is
                    // nothing to wait on.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1731
    /// Writes staged/unstaged hunk changes back to the git index when a
    /// buffer diff reports them.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the operation counter so stale index writes can be
                // recognized downstream.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // On write failure, roll back the optimistic pending
                        // hunk state and surface the error as a store event.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1771
1772 fn local_worktree_git_repos_changed(
1773 &mut self,
1774 worktree: Entity<Worktree>,
1775 changed_repos: &UpdatedGitRepositoriesSet,
1776 cx: &mut Context<Self>,
1777 ) {
1778 log::debug!("local worktree repos changed");
1779 debug_assert!(worktree.read(cx).is_local());
1780
1781 for repository in self.repositories.values() {
1782 repository.update(cx, |repository, cx| {
1783 let repo_abs_path = &repository.work_directory_abs_path;
1784 if changed_repos.iter().any(|update| {
1785 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1786 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1787 }) {
1788 repository.reload_buffer_diff_bases(cx);
1789 }
1790 });
1791 }
1792 }
1793
    /// All repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1797
1798 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1799 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1800 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1801 Some(status.status)
1802 }
1803
1804 pub fn repository_and_path_for_buffer_id(
1805 &self,
1806 buffer_id: BufferId,
1807 cx: &App,
1808 ) -> Option<(Entity<Repository>, RepoPath)> {
1809 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1810 let project_path = buffer.read(cx).project_path(cx)?;
1811 self.repository_and_path_for_project_path(&project_path, cx)
1812 }
1813
    /// Finds the repository containing `path`, along with the path expressed
    /// relative to that repository's work directory.
    ///
    /// When nested repositories both contain the path, `max_by_key` on the
    /// work-directory path selects the one with the greatest (i.e. longest,
    /// innermost) work directory — the most specific repository.
    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }
1828
1829 pub fn git_init(
1830 &self,
1831 path: Arc<Path>,
1832 fallback_branch_name: String,
1833 cx: &App,
1834 ) -> Task<Result<()>> {
1835 match &self.state {
1836 GitStoreState::Local { fs, .. } => {
1837 let fs = fs.clone();
1838 cx.background_executor()
1839 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1840 }
1841 GitStoreState::Remote {
1842 upstream_client,
1843 upstream_project_id: project_id,
1844 ..
1845 } => {
1846 let client = upstream_client.clone();
1847 let project_id = *project_id;
1848 cx.background_executor().spawn(async move {
1849 client
1850 .request(proto::GitInit {
1851 project_id: project_id,
1852 abs_path: path.to_string_lossy().into_owned(),
1853 fallback_branch_name,
1854 })
1855 .await?;
1856 Ok(())
1857 })
1858 }
1859 }
1860 }
1861
1862 pub fn git_clone(
1863 &self,
1864 repo: String,
1865 path: impl Into<Arc<std::path::Path>>,
1866 cx: &App,
1867 ) -> Task<Result<()>> {
1868 let path = path.into();
1869 match &self.state {
1870 GitStoreState::Local { fs, .. } => {
1871 let fs = fs.clone();
1872 cx.background_executor()
1873 .spawn(async move { fs.git_clone(&repo, &path).await })
1874 }
1875 GitStoreState::Remote {
1876 upstream_client,
1877 upstream_project_id,
1878 ..
1879 } => {
1880 if upstream_client.is_via_collab() {
1881 return Task::ready(Err(anyhow!(
1882 "Git Clone isn't supported for project guests"
1883 )));
1884 }
1885 let request = upstream_client.request(proto::GitClone {
1886 project_id: *upstream_project_id,
1887 abs_path: path.to_string_lossy().into_owned(),
1888 remote_repo: repo,
1889 });
1890
1891 cx.background_spawn(async move {
1892 let result = request.await?;
1893
1894 match result.success {
1895 true => Ok(()),
1896 false => Err(anyhow!("Git Clone failed")),
1897 }
1898 })
1899 }
1900 }
1901 }
1902
    /// Message handler for `UpdateRepository`: creates or updates the
    /// corresponding remote repository entity and re-forwards the update to
    /// any downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Lazily create the repository entity on first update. The
            // subscription is stashed and registered after the `entry`
            // closure, since `this._subscriptions` can't be borrowed inside
            // it while `this.repositories` is borrowed mutably.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Relay the update downstream, rewritten with our project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1958
1959 async fn handle_remove_repository(
1960 this: Entity<Self>,
1961 envelope: TypedEnvelope<proto::RemoveRepository>,
1962 mut cx: AsyncApp,
1963 ) -> Result<()> {
1964 this.update(&mut cx, |this, cx| {
1965 let mut update = envelope.payload;
1966 let id = RepositoryId::from_proto(update.id);
1967 this.repositories.remove(&id);
1968 if let Some((client, project_id)) = this.downstream_client() {
1969 update.project_id = project_id.to_proto();
1970 client.send(update).log_err();
1971 }
1972 if this.active_repo_id == Some(id) {
1973 this.active_repo_id = None;
1974 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1975 }
1976 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1977 });
1978 Ok(())
1979 }
1980
1981 async fn handle_git_init(
1982 this: Entity<Self>,
1983 envelope: TypedEnvelope<proto::GitInit>,
1984 cx: AsyncApp,
1985 ) -> Result<proto::Ack> {
1986 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1987 let name = envelope.payload.fallback_branch_name;
1988 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1989 .await?;
1990
1991 Ok(proto::Ack {})
1992 }
1993
1994 async fn handle_git_clone(
1995 this: Entity<Self>,
1996 envelope: TypedEnvelope<proto::GitClone>,
1997 cx: AsyncApp,
1998 ) -> Result<proto::GitCloneResponse> {
1999 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2000 let repo_name = envelope.payload.remote_repo;
2001 let result = cx
2002 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2003 .await;
2004
2005 Ok(proto::GitCloneResponse {
2006 success: result.is_ok(),
2007 })
2008 }
2009
    /// Message handler for `Fetch`: runs a fetch on the requested repository,
    /// proxying askpass (credential) prompts back to the requesting peer.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        // Credential prompts are forwarded to the peer that initiated the fetch.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // Double `?`: the await yields a nested result — presumably a job
        // delivery error wrapped around the git command's own result.
        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2039
    /// Message handler for `Push`: pushes a branch to a remote on the
    /// requested repository, proxying askpass prompts to the requesting peer.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // Guard on `as_ref()` so options are decoded only when the optional
        // proto field was actually present (`options()` would otherwise fall
        // back to the enum's default variant).
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        // Double `?`: nested result around the git command's own result.
        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2087
2088 async fn handle_pull(
2089 this: Entity<Self>,
2090 envelope: TypedEnvelope<proto::Pull>,
2091 mut cx: AsyncApp,
2092 ) -> Result<proto::RemoteMessageResponse> {
2093 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2094 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2095 let askpass_id = envelope.payload.askpass_id;
2096 let askpass = make_remote_delegate(
2097 this,
2098 envelope.payload.project_id,
2099 repository_id,
2100 askpass_id,
2101 &mut cx,
2102 );
2103
2104 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2105 let remote_name = envelope.payload.remote_name.into();
2106 let rebase = envelope.payload.rebase;
2107
2108 let remote_message = repository_handle
2109 .update(&mut cx, |repository_handle, cx| {
2110 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2111 })
2112 .await??;
2113
2114 Ok(proto::RemoteMessageResponse {
2115 stdout: remote_message.stdout,
2116 stderr: remote_message.stderr,
2117 })
2118 }
2119
2120 async fn handle_stage(
2121 this: Entity<Self>,
2122 envelope: TypedEnvelope<proto::Stage>,
2123 mut cx: AsyncApp,
2124 ) -> Result<proto::Ack> {
2125 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2126 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2127
2128 let entries = envelope
2129 .payload
2130 .paths
2131 .into_iter()
2132 .map(|path| RepoPath::new(&path))
2133 .collect::<Result<Vec<_>>>()?;
2134
2135 repository_handle
2136 .update(&mut cx, |repository_handle, cx| {
2137 repository_handle.stage_entries(entries, cx)
2138 })
2139 .await?;
2140 Ok(proto::Ack {})
2141 }
2142
2143 async fn handle_unstage(
2144 this: Entity<Self>,
2145 envelope: TypedEnvelope<proto::Unstage>,
2146 mut cx: AsyncApp,
2147 ) -> Result<proto::Ack> {
2148 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2149 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2150
2151 let entries = envelope
2152 .payload
2153 .paths
2154 .into_iter()
2155 .map(|path| RepoPath::new(&path))
2156 .collect::<Result<Vec<_>>>()?;
2157
2158 repository_handle
2159 .update(&mut cx, |repository_handle, cx| {
2160 repository_handle.unstage_entries(entries, cx)
2161 })
2162 .await?;
2163
2164 Ok(proto::Ack {})
2165 }
2166
2167 async fn handle_stash(
2168 this: Entity<Self>,
2169 envelope: TypedEnvelope<proto::Stash>,
2170 mut cx: AsyncApp,
2171 ) -> Result<proto::Ack> {
2172 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2173 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2174
2175 let entries = envelope
2176 .payload
2177 .paths
2178 .into_iter()
2179 .map(|path| RepoPath::new(&path))
2180 .collect::<Result<Vec<_>>>()?;
2181
2182 repository_handle
2183 .update(&mut cx, |repository_handle, cx| {
2184 repository_handle.stash_entries(entries, cx)
2185 })
2186 .await?;
2187
2188 Ok(proto::Ack {})
2189 }
2190
2191 async fn handle_stash_pop(
2192 this: Entity<Self>,
2193 envelope: TypedEnvelope<proto::StashPop>,
2194 mut cx: AsyncApp,
2195 ) -> Result<proto::Ack> {
2196 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2197 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2198 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2199
2200 repository_handle
2201 .update(&mut cx, |repository_handle, cx| {
2202 repository_handle.stash_pop(stash_index, cx)
2203 })
2204 .await?;
2205
2206 Ok(proto::Ack {})
2207 }
2208
2209 async fn handle_stash_apply(
2210 this: Entity<Self>,
2211 envelope: TypedEnvelope<proto::StashApply>,
2212 mut cx: AsyncApp,
2213 ) -> Result<proto::Ack> {
2214 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2215 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2216 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2217
2218 repository_handle
2219 .update(&mut cx, |repository_handle, cx| {
2220 repository_handle.stash_apply(stash_index, cx)
2221 })
2222 .await?;
2223
2224 Ok(proto::Ack {})
2225 }
2226
2227 async fn handle_stash_drop(
2228 this: Entity<Self>,
2229 envelope: TypedEnvelope<proto::StashDrop>,
2230 mut cx: AsyncApp,
2231 ) -> Result<proto::Ack> {
2232 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2233 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2234 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2235
2236 repository_handle
2237 .update(&mut cx, |repository_handle, cx| {
2238 repository_handle.stash_drop(stash_index, cx)
2239 })
2240 .await??;
2241
2242 Ok(proto::Ack {})
2243 }
2244
2245 async fn handle_set_index_text(
2246 this: Entity<Self>,
2247 envelope: TypedEnvelope<proto::SetIndexText>,
2248 mut cx: AsyncApp,
2249 ) -> Result<proto::Ack> {
2250 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2251 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2252 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2253
2254 repository_handle
2255 .update(&mut cx, |repository_handle, cx| {
2256 repository_handle.spawn_set_index_text_job(
2257 repo_path,
2258 envelope.payload.text,
2259 None,
2260 cx,
2261 )
2262 })
2263 .await??;
2264 Ok(proto::Ack {})
2265 }
2266
2267 async fn handle_run_hook(
2268 this: Entity<Self>,
2269 envelope: TypedEnvelope<proto::RunGitHook>,
2270 mut cx: AsyncApp,
2271 ) -> Result<proto::Ack> {
2272 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2273 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2274 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2275 repository_handle
2276 .update(&mut cx, |repository_handle, cx| {
2277 repository_handle.run_hook(hook, cx)
2278 })
2279 .await??;
2280 Ok(proto::Ack {})
2281 }
2282
2283 async fn handle_commit(
2284 this: Entity<Self>,
2285 envelope: TypedEnvelope<proto::Commit>,
2286 mut cx: AsyncApp,
2287 ) -> Result<proto::Ack> {
2288 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2289 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2290 let askpass_id = envelope.payload.askpass_id;
2291
2292 let askpass = make_remote_delegate(
2293 this,
2294 envelope.payload.project_id,
2295 repository_id,
2296 askpass_id,
2297 &mut cx,
2298 );
2299
2300 let message = SharedString::from(envelope.payload.message);
2301 let name = envelope.payload.name.map(SharedString::from);
2302 let email = envelope.payload.email.map(SharedString::from);
2303 let options = envelope.payload.options.unwrap_or_default();
2304
2305 repository_handle
2306 .update(&mut cx, |repository_handle, cx| {
2307 repository_handle.commit(
2308 message,
2309 name.zip(email),
2310 CommitOptions {
2311 amend: options.amend,
2312 signoff: options.signoff,
2313 },
2314 askpass,
2315 cx,
2316 )
2317 })
2318 .await??;
2319 Ok(proto::Ack {})
2320 }
2321
2322 async fn handle_get_remotes(
2323 this: Entity<Self>,
2324 envelope: TypedEnvelope<proto::GetRemotes>,
2325 mut cx: AsyncApp,
2326 ) -> Result<proto::GetRemotesResponse> {
2327 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2328 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2329
2330 let branch_name = envelope.payload.branch_name;
2331 let is_push = envelope.payload.is_push;
2332
2333 let remotes = repository_handle
2334 .update(&mut cx, |repository_handle, _| {
2335 repository_handle.get_remotes(branch_name, is_push)
2336 })
2337 .await??;
2338
2339 Ok(proto::GetRemotesResponse {
2340 remotes: remotes
2341 .into_iter()
2342 .map(|remotes| proto::get_remotes_response::Remote {
2343 name: remotes.name.to_string(),
2344 })
2345 .collect::<Vec<_>>(),
2346 })
2347 }
2348
2349 async fn handle_get_worktrees(
2350 this: Entity<Self>,
2351 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2352 mut cx: AsyncApp,
2353 ) -> Result<proto::GitWorktreesResponse> {
2354 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2355 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2356
2357 let worktrees = repository_handle
2358 .update(&mut cx, |repository_handle, _| {
2359 repository_handle.worktrees()
2360 })
2361 .await??;
2362
2363 Ok(proto::GitWorktreesResponse {
2364 worktrees: worktrees
2365 .into_iter()
2366 .map(|worktree| worktree_to_proto(&worktree))
2367 .collect::<Vec<_>>(),
2368 })
2369 }
2370
2371 async fn handle_create_worktree(
2372 this: Entity<Self>,
2373 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2374 mut cx: AsyncApp,
2375 ) -> Result<proto::Ack> {
2376 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2377 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2378 let directory = PathBuf::from(envelope.payload.directory);
2379 let name = envelope.payload.name;
2380 let commit = envelope.payload.commit;
2381
2382 repository_handle
2383 .update(&mut cx, |repository_handle, _| {
2384 repository_handle.create_worktree(name, directory, commit)
2385 })
2386 .await??;
2387
2388 Ok(proto::Ack {})
2389 }
2390
2391 async fn handle_remove_worktree(
2392 this: Entity<Self>,
2393 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2394 mut cx: AsyncApp,
2395 ) -> Result<proto::Ack> {
2396 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2397 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2398 let path = PathBuf::from(envelope.payload.path);
2399 let force = envelope.payload.force;
2400
2401 repository_handle
2402 .update(&mut cx, |repository_handle, _| {
2403 repository_handle.remove_worktree(path, force)
2404 })
2405 .await??;
2406
2407 Ok(proto::Ack {})
2408 }
2409
2410 async fn handle_rename_worktree(
2411 this: Entity<Self>,
2412 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2413 mut cx: AsyncApp,
2414 ) -> Result<proto::Ack> {
2415 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2416 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2417 let old_path = PathBuf::from(envelope.payload.old_path);
2418 let new_path = PathBuf::from(envelope.payload.new_path);
2419
2420 repository_handle
2421 .update(&mut cx, |repository_handle, _| {
2422 repository_handle.rename_worktree(old_path, new_path)
2423 })
2424 .await??;
2425
2426 Ok(proto::Ack {})
2427 }
2428
2429 async fn handle_get_branches(
2430 this: Entity<Self>,
2431 envelope: TypedEnvelope<proto::GitGetBranches>,
2432 mut cx: AsyncApp,
2433 ) -> Result<proto::GitBranchesResponse> {
2434 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2435 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2436
2437 let branches = repository_handle
2438 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2439 .await??;
2440
2441 Ok(proto::GitBranchesResponse {
2442 branches: branches
2443 .into_iter()
2444 .map(|branch| branch_to_proto(&branch))
2445 .collect::<Vec<_>>(),
2446 })
2447 }
2448 async fn handle_get_default_branch(
2449 this: Entity<Self>,
2450 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2451 mut cx: AsyncApp,
2452 ) -> Result<proto::GetDefaultBranchResponse> {
2453 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2454 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2455
2456 let branch = repository_handle
2457 .update(&mut cx, |repository_handle, _| {
2458 repository_handle.default_branch(false)
2459 })
2460 .await??
2461 .map(Into::into);
2462
2463 Ok(proto::GetDefaultBranchResponse { branch })
2464 }
2465 async fn handle_create_branch(
2466 this: Entity<Self>,
2467 envelope: TypedEnvelope<proto::GitCreateBranch>,
2468 mut cx: AsyncApp,
2469 ) -> Result<proto::Ack> {
2470 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2471 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2472 let branch_name = envelope.payload.branch_name;
2473
2474 repository_handle
2475 .update(&mut cx, |repository_handle, _| {
2476 repository_handle.create_branch(branch_name, None)
2477 })
2478 .await??;
2479
2480 Ok(proto::Ack {})
2481 }
2482
2483 async fn handle_change_branch(
2484 this: Entity<Self>,
2485 envelope: TypedEnvelope<proto::GitChangeBranch>,
2486 mut cx: AsyncApp,
2487 ) -> Result<proto::Ack> {
2488 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2489 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2490 let branch_name = envelope.payload.branch_name;
2491
2492 repository_handle
2493 .update(&mut cx, |repository_handle, _| {
2494 repository_handle.change_branch(branch_name)
2495 })
2496 .await??;
2497
2498 Ok(proto::Ack {})
2499 }
2500
2501 async fn handle_rename_branch(
2502 this: Entity<Self>,
2503 envelope: TypedEnvelope<proto::GitRenameBranch>,
2504 mut cx: AsyncApp,
2505 ) -> Result<proto::Ack> {
2506 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2507 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2508 let branch = envelope.payload.branch;
2509 let new_name = envelope.payload.new_name;
2510
2511 repository_handle
2512 .update(&mut cx, |repository_handle, _| {
2513 repository_handle.rename_branch(branch, new_name)
2514 })
2515 .await??;
2516
2517 Ok(proto::Ack {})
2518 }
2519
2520 async fn handle_create_remote(
2521 this: Entity<Self>,
2522 envelope: TypedEnvelope<proto::GitCreateRemote>,
2523 mut cx: AsyncApp,
2524 ) -> Result<proto::Ack> {
2525 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2526 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2527 let remote_name = envelope.payload.remote_name;
2528 let remote_url = envelope.payload.remote_url;
2529
2530 repository_handle
2531 .update(&mut cx, |repository_handle, _| {
2532 repository_handle.create_remote(remote_name, remote_url)
2533 })
2534 .await??;
2535
2536 Ok(proto::Ack {})
2537 }
2538
2539 async fn handle_delete_branch(
2540 this: Entity<Self>,
2541 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2542 mut cx: AsyncApp,
2543 ) -> Result<proto::Ack> {
2544 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2545 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2546 let is_remote = envelope.payload.is_remote;
2547 let branch_name = envelope.payload.branch_name;
2548
2549 repository_handle
2550 .update(&mut cx, |repository_handle, _| {
2551 repository_handle.delete_branch(is_remote, branch_name)
2552 })
2553 .await??;
2554
2555 Ok(proto::Ack {})
2556 }
2557
2558 async fn handle_remove_remote(
2559 this: Entity<Self>,
2560 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2561 mut cx: AsyncApp,
2562 ) -> Result<proto::Ack> {
2563 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2564 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2565 let remote_name = envelope.payload.remote_name;
2566
2567 repository_handle
2568 .update(&mut cx, |repository_handle, _| {
2569 repository_handle.remove_remote(remote_name)
2570 })
2571 .await??;
2572
2573 Ok(proto::Ack {})
2574 }
2575
2576 async fn handle_show(
2577 this: Entity<Self>,
2578 envelope: TypedEnvelope<proto::GitShow>,
2579 mut cx: AsyncApp,
2580 ) -> Result<proto::GitCommitDetails> {
2581 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2582 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2583
2584 let commit = repository_handle
2585 .update(&mut cx, |repository_handle, _| {
2586 repository_handle.show(envelope.payload.commit)
2587 })
2588 .await??;
2589 Ok(proto::GitCommitDetails {
2590 sha: commit.sha.into(),
2591 message: commit.message.into(),
2592 commit_timestamp: commit.commit_timestamp,
2593 author_email: commit.author_email.into(),
2594 author_name: commit.author_name.into(),
2595 })
2596 }
2597
2598 async fn handle_load_commit_diff(
2599 this: Entity<Self>,
2600 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2601 mut cx: AsyncApp,
2602 ) -> Result<proto::LoadCommitDiffResponse> {
2603 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2604 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2605
2606 let commit_diff = repository_handle
2607 .update(&mut cx, |repository_handle, _| {
2608 repository_handle.load_commit_diff(envelope.payload.commit)
2609 })
2610 .await??;
2611 Ok(proto::LoadCommitDiffResponse {
2612 files: commit_diff
2613 .files
2614 .into_iter()
2615 .map(|file| proto::CommitFile {
2616 path: file.path.to_proto(),
2617 old_text: file.old_text,
2618 new_text: file.new_text,
2619 is_binary: file.is_binary,
2620 })
2621 .collect(),
2622 })
2623 }
2624
2625 async fn handle_file_history(
2626 this: Entity<Self>,
2627 envelope: TypedEnvelope<proto::GitFileHistory>,
2628 mut cx: AsyncApp,
2629 ) -> Result<proto::GitFileHistoryResponse> {
2630 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2631 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2632 let path = RepoPath::from_proto(&envelope.payload.path)?;
2633 let skip = envelope.payload.skip as usize;
2634 let limit = envelope.payload.limit.map(|l| l as usize);
2635
2636 let file_history = repository_handle
2637 .update(&mut cx, |repository_handle, _| {
2638 repository_handle.file_history_paginated(path, skip, limit)
2639 })
2640 .await??;
2641
2642 Ok(proto::GitFileHistoryResponse {
2643 entries: file_history
2644 .entries
2645 .into_iter()
2646 .map(|entry| proto::FileHistoryEntry {
2647 sha: entry.sha.to_string(),
2648 subject: entry.subject.to_string(),
2649 message: entry.message.to_string(),
2650 commit_timestamp: entry.commit_timestamp,
2651 author_name: entry.author_name.to_string(),
2652 author_email: entry.author_email.to_string(),
2653 })
2654 .collect(),
2655 path: file_history.path.to_proto(),
2656 })
2657 }
2658
2659 async fn handle_reset(
2660 this: Entity<Self>,
2661 envelope: TypedEnvelope<proto::GitReset>,
2662 mut cx: AsyncApp,
2663 ) -> Result<proto::Ack> {
2664 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2665 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2666
2667 let mode = match envelope.payload.mode() {
2668 git_reset::ResetMode::Soft => ResetMode::Soft,
2669 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2670 };
2671
2672 repository_handle
2673 .update(&mut cx, |repository_handle, cx| {
2674 repository_handle.reset(envelope.payload.commit, mode, cx)
2675 })
2676 .await??;
2677 Ok(proto::Ack {})
2678 }
2679
2680 async fn handle_checkout_files(
2681 this: Entity<Self>,
2682 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2683 mut cx: AsyncApp,
2684 ) -> Result<proto::Ack> {
2685 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2686 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2687 let paths = envelope
2688 .payload
2689 .paths
2690 .iter()
2691 .map(|s| RepoPath::from_proto(s))
2692 .collect::<Result<Vec<_>>>()?;
2693
2694 repository_handle
2695 .update(&mut cx, |repository_handle, cx| {
2696 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2697 })
2698 .await?;
2699 Ok(proto::Ack {})
2700 }
2701
    /// Proto handler: opens (or creates) the commit-message buffer for a
    /// repository and replicates it to the requesting peer, returning the
    /// buffer's remote id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Stream the buffer's contents to the peer that asked for it. Prefer the
        // original sender id when the request was forwarded through the host.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2732
    /// Proto handler: relays an askpass (credential) prompt from a remote git
    /// operation to the local delegate registered under `askpass_id`, and
    /// returns the user's answer.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map so the mutex is not held across the
        // (potentially long-running) password prompt below.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Reinsert the delegate so later prompts for the same operation still work.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2761
2762 async fn handle_check_for_pushed_commits(
2763 this: Entity<Self>,
2764 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2765 mut cx: AsyncApp,
2766 ) -> Result<proto::CheckForPushedCommitsResponse> {
2767 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2768 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2769
2770 let branches = repository_handle
2771 .update(&mut cx, |repository_handle, _| {
2772 repository_handle.check_for_pushed_commits()
2773 })
2774 .await??;
2775 Ok(proto::CheckForPushedCommitsResponse {
2776 pushed_to: branches
2777 .into_iter()
2778 .map(|commit| commit.to_string())
2779 .collect(),
2780 })
2781 }
2782
2783 async fn handle_git_diff(
2784 this: Entity<Self>,
2785 envelope: TypedEnvelope<proto::GitDiff>,
2786 mut cx: AsyncApp,
2787 ) -> Result<proto::GitDiffResponse> {
2788 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2789 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2790 let diff_type = match envelope.payload.diff_type() {
2791 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2792 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2793 proto::git_diff::DiffType::MergeBase => {
2794 let base_ref = envelope
2795 .payload
2796 .merge_base_ref
2797 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2798 DiffType::MergeBase {
2799 base_ref: base_ref.into(),
2800 }
2801 }
2802 };
2803
2804 let mut diff = repository_handle
2805 .update(&mut cx, |repository_handle, cx| {
2806 repository_handle.diff(diff_type, cx)
2807 })
2808 .await??;
2809 const ONE_MB: usize = 1_000_000;
2810 if diff.len() > ONE_MB {
2811 diff = diff.chars().take(ONE_MB).collect()
2812 }
2813
2814 Ok(proto::GitDiffResponse { diff })
2815 }
2816
2817 async fn handle_tree_diff(
2818 this: Entity<Self>,
2819 request: TypedEnvelope<proto::GetTreeDiff>,
2820 mut cx: AsyncApp,
2821 ) -> Result<proto::GetTreeDiffResponse> {
2822 let repository_id = RepositoryId(request.payload.repository_id);
2823 let diff_type = if request.payload.is_merge {
2824 DiffTreeType::MergeBase {
2825 base: request.payload.base.into(),
2826 head: request.payload.head.into(),
2827 }
2828 } else {
2829 DiffTreeType::Since {
2830 base: request.payload.base.into(),
2831 head: request.payload.head.into(),
2832 }
2833 };
2834
2835 let diff = this
2836 .update(&mut cx, |this, cx| {
2837 let repository = this.repositories().get(&repository_id)?;
2838 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2839 })
2840 .context("missing repository")?
2841 .await??;
2842
2843 Ok(proto::GetTreeDiffResponse {
2844 entries: diff
2845 .entries
2846 .into_iter()
2847 .map(|(path, status)| proto::TreeDiffStatus {
2848 path: path.as_ref().to_proto(),
2849 status: match status {
2850 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2851 TreeDiffStatus::Modified { .. } => {
2852 proto::tree_diff_status::Status::Modified.into()
2853 }
2854 TreeDiffStatus::Deleted { .. } => {
2855 proto::tree_diff_status::Status::Deleted.into()
2856 }
2857 },
2858 oid: match status {
2859 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2860 Some(old.to_string())
2861 }
2862 TreeDiffStatus::Added => None,
2863 },
2864 })
2865 .collect(),
2866 })
2867 }
2868
2869 async fn handle_get_blob_content(
2870 this: Entity<Self>,
2871 request: TypedEnvelope<proto::GetBlobContent>,
2872 mut cx: AsyncApp,
2873 ) -> Result<proto::GetBlobContentResponse> {
2874 let oid = git::Oid::from_str(&request.payload.oid)?;
2875 let repository_id = RepositoryId(request.payload.repository_id);
2876 let content = this
2877 .update(&mut cx, |this, cx| {
2878 let repository = this.repositories().get(&repository_id)?;
2879 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2880 })
2881 .context("missing repository")?
2882 .await?;
2883 Ok(proto::GetBlobContentResponse { content })
2884 }
2885
    /// Proto handler: opens the unstaged (index vs worktree) diff for a buffer
    /// on behalf of a remote peer, records it in `shared_diffs` so the peer
    /// keeps receiving updates, and returns the staged base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Register the diff for the requesting peer (preferring the original
        // sender when forwarded) so it stays alive while the peer uses it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2909
    /// Proto handler: opens the uncommitted (HEAD vs worktree) diff for a
    /// buffer on behalf of a remote peer, registers it in `shared_diffs`, and
    /// returns the committed/staged base texts together with a `Mode` flag
    /// telling the client which of those texts were actually sent.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Register the diff for the requesting peer so it stays alive and keeps
        // receiving updates while the peer uses it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff tracks the index (staged) state, when present.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Index matches HEAD: avoid sending the same text twice.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base text (e.g. file absent from HEAD).
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2970
    /// Proto handler: applies new diff base texts pushed by the host to this
    /// replica's diff state for the given buffer. Buffers with no local diff
    /// state (or no local buffer) are silently ignored.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
2989
    /// Proto handler: computes git blame for a buffer and serializes the result.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Blame must run against the same text the client blamed, so wait until
        // this replica has applied all operations up to `version`.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3012
3013 async fn handle_get_permalink_to_line(
3014 this: Entity<Self>,
3015 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3016 mut cx: AsyncApp,
3017 ) -> Result<proto::GetPermalinkToLineResponse> {
3018 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3019 // let version = deserialize_version(&envelope.payload.version);
3020 let selection = {
3021 let proto_selection = envelope
3022 .payload
3023 .selection
3024 .context("no selection to get permalink for defined")?;
3025 proto_selection.start as u32..proto_selection.end as u32
3026 };
3027 let buffer = this.read_with(&cx, |this, cx| {
3028 this.buffer_store.read(cx).get_existing(buffer_id)
3029 })?;
3030 let permalink = this
3031 .update(&mut cx, |this, cx| {
3032 this.get_permalink_to_line(&buffer, selection, cx)
3033 })
3034 .await?;
3035 Ok(proto::GetPermalinkToLineResponse {
3036 permalink: permalink.to_string(),
3037 })
3038 }
3039
3040 fn repository_for_request(
3041 this: &Entity<Self>,
3042 id: RepositoryId,
3043 cx: &mut AsyncApp,
3044 ) -> Result<Entity<Repository>> {
3045 this.read_with(cx, |this, _| {
3046 this.repositories
3047 .get(&id)
3048 .context("missing repository handle")
3049 .cloned()
3050 })
3051 }
3052
3053 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3054 self.repositories
3055 .iter()
3056 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3057 .collect()
3058 }
3059
    /// Groups a worktree's updated entries by the repository that contains
    /// them, converting each entry to a [`RepoPath`] relative to that repo's
    /// work directory.
    ///
    /// The heavy lifting (sorting, prefix matching) runs on the background
    /// executor. Each path is assigned to at most one repository — its
    /// innermost containing repo — even when repositories are nested.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Pair each known repository with its work-directory absolute path so
        // matching can happen off the main thread without entity access.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize up front; the background tasks only see absolute paths.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // One task per repository; iterate in reverse sorted order so the
            // ordered results arrive deepest-path-first (see filtering below).
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3139}
3140
3141impl BufferGitState {
3142 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3143 Self {
3144 unstaged_diff: Default::default(),
3145 uncommitted_diff: Default::default(),
3146 oid_diffs: Default::default(),
3147 recalculate_diff_task: Default::default(),
3148 language: Default::default(),
3149 language_registry: Default::default(),
3150 recalculating_tx: postage::watch::channel_with(false).0,
3151 hunk_staging_operation_count: 0,
3152 hunk_staging_operation_count_as_of_write: 0,
3153 head_text: Default::default(),
3154 index_text: Default::default(),
3155 oid_texts: Default::default(),
3156 head_changed: Default::default(),
3157 index_changed: Default::default(),
3158 language_changed: Default::default(),
3159 conflict_updated_futures: Default::default(),
3160 conflict_set: Default::default(),
3161 reparse_conflict_markers_task: Default::default(),
3162 }
3163 }
3164
3165 #[ztracing::instrument(skip_all)]
3166 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3167 self.language = buffer.read(cx).language().cloned();
3168 self.language_changed = true;
3169 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3170 }
3171
3172 fn reparse_conflict_markers(
3173 &mut self,
3174 buffer: text::BufferSnapshot,
3175 cx: &mut Context<Self>,
3176 ) -> oneshot::Receiver<()> {
3177 let (tx, rx) = oneshot::channel();
3178
3179 let Some(conflict_set) = self
3180 .conflict_set
3181 .as_ref()
3182 .and_then(|conflict_set| conflict_set.upgrade())
3183 else {
3184 return rx;
3185 };
3186
3187 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
3188 if conflict_set.has_conflict {
3189 Some(conflict_set.snapshot())
3190 } else {
3191 None
3192 }
3193 });
3194
3195 if let Some(old_snapshot) = old_snapshot {
3196 self.conflict_updated_futures.push(tx);
3197 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
3198 let (snapshot, changed_range) = cx
3199 .background_spawn(async move {
3200 let new_snapshot = ConflictSet::parse(&buffer);
3201 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
3202 (new_snapshot, changed_range)
3203 })
3204 .await;
3205 this.update(cx, |this, cx| {
3206 if let Some(conflict_set) = &this.conflict_set {
3207 conflict_set
3208 .update(cx, |conflict_set, cx| {
3209 conflict_set.set_snapshot(snapshot, changed_range, cx);
3210 })
3211 .ok();
3212 }
3213 let futures = std::mem::take(&mut this.conflict_updated_futures);
3214 for tx in futures {
3215 tx.send(()).ok();
3216 }
3217 })
3218 }))
3219 }
3220
3221 rx
3222 }
3223
3224 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3225 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3226 }
3227
3228 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3229 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3230 }
3231
3232 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3233 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3234 }
3235
3236 fn handle_base_texts_updated(
3237 &mut self,
3238 buffer: text::BufferSnapshot,
3239 message: proto::UpdateDiffBases,
3240 cx: &mut Context<Self>,
3241 ) {
3242 use proto::update_diff_bases::Mode;
3243
3244 let Some(mode) = Mode::from_i32(message.mode) else {
3245 return;
3246 };
3247
3248 let diff_bases_change = match mode {
3249 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3250 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3251 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3252 Mode::IndexAndHead => DiffBasesChange::SetEach {
3253 index: message.staged_text,
3254 head: message.committed_text,
3255 },
3256 };
3257
3258 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3259 }
3260
3261 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3262 if *self.recalculating_tx.borrow() {
3263 let mut rx = self.recalculating_tx.subscribe();
3264 Some(async move {
3265 loop {
3266 let is_recalculating = rx.recv().await;
3267 if is_recalculating != Some(true) {
3268 break;
3269 }
3270 }
3271 })
3272 } else {
3273 None
3274 }
3275 }
3276
3277 fn diff_bases_changed(
3278 &mut self,
3279 buffer: text::BufferSnapshot,
3280 diff_bases_change: Option<DiffBasesChange>,
3281 cx: &mut Context<Self>,
3282 ) {
3283 match diff_bases_change {
3284 Some(DiffBasesChange::SetIndex(index)) => {
3285 self.index_text = index.map(|mut index| {
3286 text::LineEnding::normalize(&mut index);
3287 Arc::from(index.as_str())
3288 });
3289 self.index_changed = true;
3290 }
3291 Some(DiffBasesChange::SetHead(head)) => {
3292 self.head_text = head.map(|mut head| {
3293 text::LineEnding::normalize(&mut head);
3294 Arc::from(head.as_str())
3295 });
3296 self.head_changed = true;
3297 }
3298 Some(DiffBasesChange::SetBoth(text)) => {
3299 let text = text.map(|mut text| {
3300 text::LineEnding::normalize(&mut text);
3301 Arc::from(text.as_str())
3302 });
3303 self.head_text = text.clone();
3304 self.index_text = text;
3305 self.head_changed = true;
3306 self.index_changed = true;
3307 }
3308 Some(DiffBasesChange::SetEach { index, head }) => {
3309 self.index_text = index.map(|mut index| {
3310 text::LineEnding::normalize(&mut index);
3311 Arc::from(index.as_str())
3312 });
3313 self.index_changed = true;
3314 self.head_text = head.map(|mut head| {
3315 text::LineEnding::normalize(&mut head);
3316 Arc::from(head.as_str())
3317 });
3318 self.head_changed = true;
3319 }
3320 None => {}
3321 }
3322
3323 self.recalculate_diffs(buffer, cx)
3324 }
3325
3326 #[ztracing::instrument(skip_all)]
3327 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3328 *self.recalculating_tx.borrow_mut() = true;
3329
3330 let language = self.language.clone();
3331 let language_registry = self.language_registry.clone();
3332 let unstaged_diff = self.unstaged_diff();
3333 let uncommitted_diff = self.uncommitted_diff();
3334 let head = self.head_text.clone();
3335 let index = self.index_text.clone();
3336 let index_changed = self.index_changed;
3337 let head_changed = self.head_changed;
3338 let language_changed = self.language_changed;
3339 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3340 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3341 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3342 (None, None) => true,
3343 _ => false,
3344 };
3345
3346 let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
3347 .oid_diffs
3348 .iter()
3349 .filter_map(|(oid, weak)| {
3350 let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
3351 weak.upgrade().map(|diff| (*oid, diff, base_text))
3352 })
3353 .collect();
3354
3355 self.oid_diffs.retain(|oid, weak| {
3356 let alive = weak.upgrade().is_some();
3357 if !alive {
3358 if let Some(oid) = oid {
3359 self.oid_texts.remove(oid);
3360 }
3361 }
3362 alive
3363 });
3364 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3365 log::debug!(
3366 "start recalculating diffs for buffer {}",
3367 buffer.remote_id()
3368 );
3369
3370 let mut new_unstaged_diff = None;
3371 if let Some(unstaged_diff) = &unstaged_diff {
3372 new_unstaged_diff = Some(
3373 cx.update(|cx| {
3374 unstaged_diff.read(cx).update_diff(
3375 buffer.clone(),
3376 index,
3377 index_changed.then_some(false),
3378 language.clone(),
3379 cx,
3380 )
3381 })
3382 .await,
3383 );
3384 }
3385
3386 // Dropping BufferDiff can be expensive, so yield back to the event loop
3387 // for a bit
3388 yield_now().await;
3389
3390 let mut new_uncommitted_diff = None;
3391 if let Some(uncommitted_diff) = &uncommitted_diff {
3392 new_uncommitted_diff = if index_matches_head {
3393 new_unstaged_diff.clone()
3394 } else {
3395 Some(
3396 cx.update(|cx| {
3397 uncommitted_diff.read(cx).update_diff(
3398 buffer.clone(),
3399 head,
3400 head_changed.then_some(true),
3401 language.clone(),
3402 cx,
3403 )
3404 })
3405 .await,
3406 )
3407 }
3408 }
3409
3410 // Dropping BufferDiff can be expensive, so yield back to the event loop
3411 // for a bit
3412 yield_now().await;
3413
3414 let cancel = this.update(cx, |this, _| {
3415 // This checks whether all pending stage/unstage operations
3416 // have quiesced (i.e. both the corresponding write and the
3417 // read of that write have completed). If not, then we cancel
3418 // this recalculation attempt to avoid invalidating pending
3419 // state too quickly; another recalculation will come along
3420 // later and clear the pending state once the state of the index has settled.
3421 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3422 *this.recalculating_tx.borrow_mut() = false;
3423 true
3424 } else {
3425 false
3426 }
3427 })?;
3428 if cancel {
3429 log::debug!(
3430 concat!(
3431 "aborting recalculating diffs for buffer {}",
3432 "due to subsequent hunk operations",
3433 ),
3434 buffer.remote_id()
3435 );
3436 return Ok(());
3437 }
3438
3439 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3440 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3441 {
3442 let task = unstaged_diff.update(cx, |diff, cx| {
3443 // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
3444 // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
3445 // view multibuffers.
3446 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3447 });
3448 Some(task.await)
3449 } else {
3450 None
3451 };
3452
3453 yield_now().await;
3454
3455 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3456 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3457 {
3458 uncommitted_diff
3459 .update(cx, |diff, cx| {
3460 if language_changed {
3461 diff.language_changed(language.clone(), language_registry, cx);
3462 }
3463 diff.set_snapshot_with_secondary(
3464 new_uncommitted_diff,
3465 &buffer,
3466 unstaged_changed_range.flatten(),
3467 true,
3468 cx,
3469 )
3470 })
3471 .await;
3472 }
3473
3474 yield_now().await;
3475
3476 for (oid, oid_diff, base_text) in oid_diffs {
3477 let new_oid_diff = cx
3478 .update(|cx| {
3479 oid_diff.read(cx).update_diff(
3480 buffer.clone(),
3481 base_text,
3482 None,
3483 language.clone(),
3484 cx,
3485 )
3486 })
3487 .await;
3488
3489 oid_diff
3490 .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
3491 .await;
3492
3493 log::debug!(
3494 "finished recalculating oid diff for buffer {} oid {:?}",
3495 buffer.remote_id(),
3496 oid
3497 );
3498
3499 yield_now().await;
3500 }
3501
3502 log::debug!(
3503 "finished recalculating diffs for buffer {}",
3504 buffer.remote_id()
3505 );
3506
3507 if let Some(this) = this.upgrade() {
3508 this.update(cx, |this, _| {
3509 this.index_changed = false;
3510 this.head_changed = false;
3511 this.language_changed = false;
3512 *this.recalculating_tx.borrow_mut() = false;
3513 });
3514 }
3515
3516 Ok(())
3517 }));
3518 }
3519}
3520
/// Builds an [`AskPassDelegate`] that forwards git credential prompts for the
/// given repository to the downstream client over RPC, and sends the encrypted
/// response back through `tx`.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // No downstream client means there is nobody to ask; drop the
            // prompt (the sender is dropped, cancelling the request).
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext response from memory once encrypted.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3550
3551impl RepositoryId {
3552 pub fn to_proto(self) -> u64 {
3553 self.0
3554 }
3555
3556 pub fn from_proto(id: u64) -> Self {
3557 RepositoryId(id)
3558 }
3559}
3560
3561impl RepositorySnapshot {
3562 fn empty(
3563 id: RepositoryId,
3564 work_directory_abs_path: Arc<Path>,
3565 original_repo_abs_path: Option<Arc<Path>>,
3566 path_style: PathStyle,
3567 ) -> Self {
3568 Self {
3569 id,
3570 statuses_by_path: Default::default(),
3571 original_repo_abs_path: original_repo_abs_path
3572 .unwrap_or_else(|| work_directory_abs_path.clone()),
3573 work_directory_abs_path,
3574 branch: None,
3575 head_commit: None,
3576 scan_id: 0,
3577 merge: Default::default(),
3578 remote_origin_url: None,
3579 remote_upstream_url: None,
3580 stash_entries: Default::default(),
3581 linked_worktrees: Arc::from([]),
3582 path_style,
3583 }
3584 }
3585
3586 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3587 proto::UpdateRepository {
3588 branch_summary: self.branch.as_ref().map(branch_to_proto),
3589 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3590 updated_statuses: self
3591 .statuses_by_path
3592 .iter()
3593 .map(|entry| entry.to_proto())
3594 .collect(),
3595 removed_statuses: Default::default(),
3596 current_merge_conflicts: self
3597 .merge
3598 .merge_heads_by_conflicted_path
3599 .iter()
3600 .map(|(repo_path, _)| repo_path.to_proto())
3601 .collect(),
3602 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3603 project_id,
3604 id: self.id.to_proto(),
3605 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3606 entry_ids: vec![self.id.to_proto()],
3607 scan_id: self.scan_id,
3608 is_last_update: true,
3609 stash_entries: self
3610 .stash_entries
3611 .entries
3612 .iter()
3613 .map(stash_to_proto)
3614 .collect(),
3615 remote_upstream_url: self.remote_upstream_url.clone(),
3616 remote_origin_url: self.remote_origin_url.clone(),
3617 original_repo_abs_path: Some(
3618 self.original_repo_abs_path.to_string_lossy().into_owned(),
3619 ),
3620 linked_worktrees: self
3621 .linked_worktrees
3622 .iter()
3623 .map(worktree_to_proto)
3624 .collect(),
3625 }
3626 }
3627
3628 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3629 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3630 let mut removed_statuses: Vec<String> = Vec::new();
3631
3632 let mut new_statuses = self.statuses_by_path.iter().peekable();
3633 let mut old_statuses = old.statuses_by_path.iter().peekable();
3634
3635 let mut current_new_entry = new_statuses.next();
3636 let mut current_old_entry = old_statuses.next();
3637 loop {
3638 match (current_new_entry, current_old_entry) {
3639 (Some(new_entry), Some(old_entry)) => {
3640 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3641 Ordering::Less => {
3642 updated_statuses.push(new_entry.to_proto());
3643 current_new_entry = new_statuses.next();
3644 }
3645 Ordering::Equal => {
3646 if new_entry.status != old_entry.status
3647 || new_entry.diff_stat != old_entry.diff_stat
3648 {
3649 updated_statuses.push(new_entry.to_proto());
3650 }
3651 current_old_entry = old_statuses.next();
3652 current_new_entry = new_statuses.next();
3653 }
3654 Ordering::Greater => {
3655 removed_statuses.push(old_entry.repo_path.to_proto());
3656 current_old_entry = old_statuses.next();
3657 }
3658 }
3659 }
3660 (None, Some(old_entry)) => {
3661 removed_statuses.push(old_entry.repo_path.to_proto());
3662 current_old_entry = old_statuses.next();
3663 }
3664 (Some(new_entry), None) => {
3665 updated_statuses.push(new_entry.to_proto());
3666 current_new_entry = new_statuses.next();
3667 }
3668 (None, None) => break,
3669 }
3670 }
3671
3672 proto::UpdateRepository {
3673 branch_summary: self.branch.as_ref().map(branch_to_proto),
3674 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3675 updated_statuses,
3676 removed_statuses,
3677 current_merge_conflicts: self
3678 .merge
3679 .merge_heads_by_conflicted_path
3680 .iter()
3681 .map(|(path, _)| path.to_proto())
3682 .collect(),
3683 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3684 project_id,
3685 id: self.id.to_proto(),
3686 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3687 entry_ids: vec![],
3688 scan_id: self.scan_id,
3689 is_last_update: true,
3690 stash_entries: self
3691 .stash_entries
3692 .entries
3693 .iter()
3694 .map(stash_to_proto)
3695 .collect(),
3696 remote_upstream_url: self.remote_upstream_url.clone(),
3697 remote_origin_url: self.remote_origin_url.clone(),
3698 original_repo_abs_path: Some(
3699 self.original_repo_abs_path.to_string_lossy().into_owned(),
3700 ),
3701 linked_worktrees: self
3702 .linked_worktrees
3703 .iter()
3704 .map(worktree_to_proto)
3705 .collect(),
3706 }
3707 }
3708
3709 /// The main worktree is the original checkout that other worktrees were
3710 /// created from.
3711 ///
3712 /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
3713 /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
3714 pub fn is_main_worktree(&self) -> bool {
3715 self.work_directory_abs_path == self.original_repo_abs_path
3716 }
3717
3718 /// Returns true if this repository is a linked worktree, that is, one that
3719 /// was created from another worktree.
3720 ///
3721 /// This is by definition the opposite of [`Self::is_main_worktree`].
3722 pub fn is_linked_worktree(&self) -> bool {
3723 !self.is_main_worktree()
3724 }
3725
3726 pub fn linked_worktrees(&self) -> &[GitWorktree] {
3727 &self.linked_worktrees
3728 }
3729
3730 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3731 self.statuses_by_path.iter().cloned()
3732 }
3733
3734 pub fn status_summary(&self) -> GitSummary {
3735 self.statuses_by_path.summary().item_summary
3736 }
3737
3738 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3739 self.statuses_by_path
3740 .get(&PathKey(path.as_ref().clone()), ())
3741 .cloned()
3742 }
3743
3744 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
3745 self.statuses_by_path
3746 .get(&PathKey(path.as_ref().clone()), ())
3747 .and_then(|entry| entry.diff_stat)
3748 }
3749
3750 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3751 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3752 }
3753
3754 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3755 self.path_style
3756 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3757 .unwrap()
3758 .into()
3759 }
3760
3761 #[inline]
3762 fn abs_path_to_repo_path_inner(
3763 work_directory_abs_path: &Path,
3764 abs_path: &Path,
3765 path_style: PathStyle,
3766 ) -> Option<RepoPath> {
3767 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3768 Some(RepoPath::from_rel_path(&rel_path))
3769 }
3770
3771 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3772 self.merge
3773 .merge_heads_by_conflicted_path
3774 .contains_key(repo_path)
3775 }
3776
3777 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3778 let had_conflict_on_last_merge_head_change = self
3779 .merge
3780 .merge_heads_by_conflicted_path
3781 .contains_key(repo_path);
3782 let has_conflict_currently = self
3783 .status_for_path(repo_path)
3784 .is_some_and(|entry| entry.status.is_conflicted());
3785 had_conflict_on_last_merge_head_change || has_conflict_currently
3786 }
3787
3788 /// This is the name that will be displayed in the repository selector for this repository.
3789 pub fn display_name(&self) -> SharedString {
3790 self.work_directory_abs_path
3791 .file_name()
3792 .unwrap_or_default()
3793 .to_string_lossy()
3794 .to_string()
3795 .into()
3796 }
3797}
3798
3799pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3800 proto::StashEntry {
3801 oid: entry.oid.as_bytes().to_vec(),
3802 message: entry.message.clone(),
3803 branch: entry.branch.clone(),
3804 index: entry.index as u64,
3805 timestamp: entry.timestamp,
3806 }
3807}
3808
3809pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3810 Ok(StashEntry {
3811 oid: Oid::from_bytes(&entry.oid)?,
3812 message: entry.message.clone(),
3813 index: entry.index as usize,
3814 branch: entry.branch.clone(),
3815 timestamp: entry.timestamp,
3816 })
3817}
3818
3819impl MergeDetails {
3820 async fn update(
3821 &mut self,
3822 backend: &Arc<dyn GitRepository>,
3823 current_conflicted_paths: Vec<RepoPath>,
3824 ) -> Result<bool> {
3825 log::debug!("load merge details");
3826 self.message = backend.merge_message().await.map(SharedString::from);
3827 let heads = backend
3828 .revparse_batch(vec![
3829 "MERGE_HEAD".into(),
3830 "CHERRY_PICK_HEAD".into(),
3831 "REBASE_HEAD".into(),
3832 "REVERT_HEAD".into(),
3833 "APPLY_HEAD".into(),
3834 ])
3835 .await
3836 .log_err()
3837 .unwrap_or_default()
3838 .into_iter()
3839 .map(|opt| opt.map(SharedString::from))
3840 .collect::<Vec<_>>();
3841
3842 let mut conflicts_changed = false;
3843
3844 // Record the merge state for newly conflicted paths
3845 for path in ¤t_conflicted_paths {
3846 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3847 conflicts_changed = true;
3848 self.merge_heads_by_conflicted_path
3849 .insert(path.clone(), heads.clone());
3850 }
3851 }
3852
3853 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3854 self.merge_heads_by_conflicted_path
3855 .retain(|path, old_merge_heads| {
3856 let keep = current_conflicted_paths.contains(path)
3857 || (old_merge_heads == &heads
3858 && old_merge_heads.iter().any(|head| head.is_some()));
3859 if !keep {
3860 conflicts_changed = true;
3861 }
3862 keep
3863 });
3864
3865 Ok(conflicts_changed)
3866 }
3867}
3868
3869impl Repository {
3870 pub fn is_trusted(&self) -> bool {
3871 match self.repository_state.peek() {
3872 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3873 _ => false,
3874 }
3875 }
3876
3877 pub fn snapshot(&self) -> RepositorySnapshot {
3878 self.snapshot.clone()
3879 }
3880
    /// Iterates over all per-path pending git operations, yielding owned copies.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
3884
3885 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3886 self.pending_ops.summary().clone()
3887 }
3888
3889 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3890 self.pending_ops
3891 .get(&PathKey(path.as_ref().clone()), ())
3892 .cloned()
3893 }
3894
    /// Constructs a repository backed by a local git checkout.
    ///
    /// Backend initialization runs asynchronously: `repository_state` resolves
    /// once `LocalRepositoryState::new` completes (errors are stringified so
    /// the shared future stays cloneable). A dedicated worker task is spawned
    /// to process queued git jobs against that state.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Shared futures require a cloneable value; anyhow::Error is not.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Cached graph data is tied to the current branch; drop it on branch
        // change (scan_id > 1 presumably skips the initial scan — TODO confirm).
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3962
    /// Constructs a repository that proxies git operations to a remote peer
    /// over RPC, identified by `project_id` and driven through `client`.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike the local case, the remote state is available immediately.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4000
    /// Upgrades the weak handle to the owning [`GitStore`], if it is still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4004
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, then feeds any changes into each buffer's diff
    /// state and forwards them to the downstream client, if any.
    ///
    /// Runs as a keyed job so concurrent reload requests are funneled through
    /// the repository's serialized git worker. Only meaningful for local
    /// repositories; for remote ones it logs an error and returns.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (foreground): collect, for each open buffer in this
                // repo, its current base texts — but only for the diffs that
                // are actually still in use (upgradable).
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Phase 2 (background): re-read base texts from git and diff
                // them against the previously cached texts to decide which
                // bases actually changed for each buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // Collapse the (index changed?, head changed?)
                            // combinations into the minimal DiffBasesChange.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (foreground): apply each change to the buffer's diff
                // state and mirror it to the downstream client.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4164
    /// Enqueues `job` on this repository's serial git job queue, without a
    /// coalescing key, and returns a receiver for its result.
    ///
    /// See [`Self::send_keyed_job`] for the `status` semantics. The receiver
    /// yields `Canceled` if the queue is dropped before the job completes.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4177
    /// Enqueues `job` on this repository's serial git job queue and returns a
    /// receiver for its result.
    ///
    /// `key` (when `Some`) identifies related jobs so the queue can treat
    /// them specially (see `GitJobKey`). When `status` is `Some`, the job is
    /// recorded in `active_jobs` for the duration of its run — with its start
    /// time and the given message — so in-flight git activity can be surfaced;
    /// `cx.notify()` is emitted on both insertion and removal. The receiver
    /// yields `Canceled` if the queue is dropped before the job completes.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    // Construct the caller's future eagerly, then drive it in
                    // a spawned task so the active-job bookkeeping can update
                    // the entity around it.
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Always clear the job entry, even if the entity
                        // update for insertion failed earlier.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may have been dropped; ignore the error.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4227
4228 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4229 let Some(git_store) = self.git_store.upgrade() else {
4230 return;
4231 };
4232 let entity = cx.entity();
4233 git_store.update(cx, |git_store, cx| {
4234 let Some((&id, _)) = git_store
4235 .repositories
4236 .iter()
4237 .find(|(_, handle)| *handle == &entity)
4238 else {
4239 return;
4240 };
4241 git_store.active_repo_id = Some(id);
4242 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4243 });
4244 }
4245
4246 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4247 self.snapshot.status()
4248 }
4249
4250 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4251 self.snapshot.diff_stat_for_path(path)
4252 }
4253
4254 pub fn cached_stash(&self) -> GitStash {
4255 self.snapshot.stash_entries.clone()
4256 }
4257
4258 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4259 let git_store = self.git_store.upgrade()?;
4260 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4261 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4262 let abs_path = SanitizedPath::new(&abs_path);
4263 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4264 Some(ProjectPath {
4265 worktree_id: worktree.read(cx).id(),
4266 path: relative_path,
4267 })
4268 }
4269
4270 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4271 let git_store = self.git_store.upgrade()?;
4272 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4273 let abs_path = worktree_store.absolutize(path, cx)?;
4274 self.snapshot.abs_path_to_repo_path(&abs_path)
4275 }
4276
4277 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4278 other
4279 .read(cx)
4280 .snapshot
4281 .work_directory_abs_path
4282 .starts_with(&self.snapshot.work_directory_abs_path)
4283 }
4284
    /// Returns the buffer used for composing commit messages, creating it if
    /// necessary; the created buffer is cached in `commit_message_buffer`.
    ///
    /// Local repositories create the buffer in-process; remote ones request
    /// one from the host over RPC and wait for it to be replicated. In both
    /// cases the "Git Commit" language is applied when a registry is given.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the previously created buffer.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4337
4338 fn open_local_commit_buffer(
4339 language_registry: Option<Arc<LanguageRegistry>>,
4340 buffer_store: Entity<BufferStore>,
4341 cx: &mut Context<Self>,
4342 ) -> Task<Result<Entity<Buffer>>> {
4343 cx.spawn(async move |repository, cx| {
4344 let git_commit_language = match language_registry {
4345 Some(language_registry) => {
4346 Some(language_registry.language_for_name("Git Commit").await?)
4347 }
4348 None => None,
4349 };
4350 let buffer = buffer_store
4351 .update(cx, |buffer_store, cx| {
4352 buffer_store.create_buffer(git_commit_language, false, cx)
4353 })
4354 .await?;
4355
4356 repository.update(cx, |repository, _| {
4357 repository.commit_message_buffer = Some(buffer.clone());
4358 })?;
4359 Ok(buffer)
4360 })
4361 }
4362
    /// Restores `paths` in the working tree to their contents at `commit`,
    /// tracking the affected paths as pending "reverted" operations while the
    /// job runs. Remote repositories proxy the request over RPC.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4417
    /// Resets HEAD to `commit` with the given mode (soft keeps the index,
    /// mixed resets it). Remote repositories proxy the request over RPC.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4451
    /// Loads the metadata (sha, message, author, timestamp) of `commit`.
    /// Remote repositories proxy the request over RPC.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4479
    /// Loads the full diff of `commit`, including old/new text and a binary
    /// flag for each changed file. Remote repositories proxy the request over
    /// RPC and decode the per-file payloads from the response.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        // Fails as a whole if any file path fails to decode.
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4515
4516 pub fn file_history(
4517 &mut self,
4518 path: RepoPath,
4519 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4520 self.file_history_paginated(path, 0, None)
4521 }
4522
    /// Fetches a page of the commit history touching `path`, skipping `skip`
    /// entries and returning at most `limit` entries (all remaining when
    /// `None`). Remote repositories proxy the request over RPC.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        // The host reports the (possibly normalized) path back.
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4564
4565 pub fn get_graph_data(
4566 &self,
4567 log_source: LogSource,
4568 log_order: LogOrder,
4569 ) -> Option<&InitialGitGraphData> {
4570 self.initial_graph_data.get(&(log_source, log_order))
4571 }
4572
    /// Searches the commit log of `log_source` with `search_args`, streaming
    /// matching commit OIDs through `request_tx`.
    ///
    /// Runs on the background executor. Remote repositories and failures to
    /// obtain the repository state are silently ignored — search is currently
    /// local-only; only backend search errors are logged.
    pub fn search_commits(
        &mut self,
        log_source: LogSource,
        search_args: SearchCommitArgs,
        request_tx: smol::channel::Sender<Oid>,
        cx: &mut Context<Self>,
    ) {
        let repository_state = self.repository_state.clone();

        cx.background_spawn(async move {
            let repo_state = repository_state.await;

            match repo_state {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                    backend
                        .search_commits(log_source, search_args, request_tx)
                        .await
                        .log_err();
                }
                Ok(RepositoryState::Remote(_)) => {}
                Err(_) => {}
            };
        })
        .detach();
    }
4598
    /// Returns the slice of commit-graph rows covering `range` for the given
    /// log source/order, kicking off a background fetch on first access.
    ///
    /// The `initial_graph_data` entry is created lazily; its `fetch_task`
    /// streams commits in via [`Self::local_git_graph_data`], and any fetch
    /// error is recorded on the entry so it can be surfaced alongside the
    /// (possibly partial) data.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record fetch failures on the entry itself so callers of
                    // `graph_data` can display them.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the rows fetched so far.
        // NOTE(review): for non-empty data, `start` is clamped to `len - 1`,
        // so a fully out-of-range request still yields the final row — confirm
        // that this behavior is intended rather than returning an empty slice.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4668
    /// Streams the initial commit-graph data for a local repository from the
    /// backend into the matching `initial_graph_data` entry, emitting
    /// `RepositoryEvent::GraphEvent(.., CountUpdated)` as rows arrive.
    ///
    /// The backend produces batches of commits on a background task; each
    /// batch is folded into the entry on the foreground, keeping
    /// `commit_oid_to_index` in sync with `commit_data`. Errors from the
    /// backend task are returned as a `SharedString`.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                // Only modify an existing entry; this task is owned by that
                // entry, so a vacant slot indicates a logic error.
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4723
    /// Returns the cached state of the commit details for `sha`, requesting a
    /// load when it isn't cached yet.
    ///
    /// When the background commit-data handler is open, the sha is queued and
    /// a `Loading` placeholder is stored. When it is closed, the handler is
    /// (re)started — the request itself is retried by a later call. While the
    /// handler is still starting, callers simply observe `Loading`.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only record `Loading` once the request was accepted.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4744
    /// Starts the background commit-data handler: a pair of tasks that load
    /// commit details on demand for the git graph.
    ///
    /// A background task owns the backend's commit-data reader and serves
    /// shas arriving on the request channel, shutting down after 10 seconds
    /// of inactivity or when either channel closes. A foreground task folds
    /// results into `commit_data` and flips the handler back to `Closed` when
    /// the result stream ends, allowing [`Self::fetch_commit_data`] to
    /// restart it later.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Result channel closed (or entity dropped): mark the handler
            // closed so a later fetch can restart it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: tear the handler down if no request arrives
                // within 10 seconds.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Explicitly close the result channel so the foreground task can
            // mark the handler `Closed`.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4835
4836 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4837 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4838 }
4839
    /// Saves any open buffers with unsaved edits that correspond to
    /// `entries`, returning one save task per buffer.
    ///
    /// Buffers whose file no longer exists on disk are skipped — presumably
    /// to avoid recreating deleted files; confirm against callers.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
4866
4867 pub fn stage_entries(
4868 &mut self,
4869 entries: Vec<RepoPath>,
4870 cx: &mut Context<Self>,
4871 ) -> Task<anyhow::Result<()>> {
4872 self.stage_or_unstage_entries(true, entries, cx)
4873 }
4874
4875 pub fn unstage_entries(
4876 &mut self,
4877 entries: Vec<RepoPath>,
4878 cx: &mut Context<Self>,
4879 ) -> Task<anyhow::Result<()>> {
4880 self.stage_or_unstage_entries(false, entries, cx)
4881 }
4882
    /// Stages or unstages `entries`, coordinating the on-disk index write
    /// with the in-memory uncommitted diffs.
    ///
    /// Dirty buffers for the affected paths are saved first. The paths are
    /// tracked as pending staged/unstaged operations, and the index write is
    /// keyed with `GitJobKey::WriteIndex(..)`. Each affected buffer's
    /// uncommitted diff optimistically stages/unstages all of its hunks
    /// before the write; on success, the recorded hunk-staging-operation
    /// count is committed, and on failure the pending hunks are cleared.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable command line shown as job status.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically update every affected buffer's
                            // uncommitted diff, remembering each diff state's
                            // operation count so it can be reconciled below.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write (locally or over
                            // RPC).
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Commit the optimistic updates on success, or
                            // roll back the pending hunks on failure.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5067
5068 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5069 let snapshot = self.snapshot.clone();
5070 let pending_ops = self.pending_ops.clone();
5071 let to_stage = cx.background_spawn(async move {
5072 snapshot
5073 .status()
5074 .filter_map(|entry| {
5075 if let Some(ops) =
5076 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5077 {
5078 if ops.staging() || ops.staged() {
5079 None
5080 } else {
5081 Some(entry.repo_path)
5082 }
5083 } else if entry.status.staging().is_fully_staged() {
5084 None
5085 } else {
5086 Some(entry.repo_path)
5087 }
5088 })
5089 .collect()
5090 });
5091
5092 cx.spawn(async move |this, cx| {
5093 let to_stage = to_stage.await;
5094 this.update(cx, |this, cx| {
5095 this.stage_or_unstage_entries(true, to_stage, cx)
5096 })?
5097 .await
5098 })
5099 }
5100
5101 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5102 let snapshot = self.snapshot.clone();
5103 let pending_ops = self.pending_ops.clone();
5104 let to_unstage = cx.background_spawn(async move {
5105 snapshot
5106 .status()
5107 .filter_map(|entry| {
5108 if let Some(ops) =
5109 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5110 {
5111 if !ops.staging() && !ops.staged() {
5112 None
5113 } else {
5114 Some(entry.repo_path)
5115 }
5116 } else if entry.status.staging().is_fully_unstaged() {
5117 None
5118 } else {
5119 Some(entry.repo_path)
5120 }
5121 })
5122 .collect()
5123 });
5124
5125 cx.spawn(async move |this, cx| {
5126 let to_unstage = to_unstage.await;
5127 this.update(cx, |this, cx| {
5128 this.stage_or_unstage_entries(false, to_unstage, cx)
5129 })?
5130 .await
5131 })
5132 }
5133
5134 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5135 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5136
5137 self.stash_entries(to_stash, cx)
5138 }
5139
    /// Stashes the given paths. Remote repositories proxy the request over
    /// RPC.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5176
    /// Pops the stash entry at `index` (or the latest entry when `None`).
    /// Remote repositories proxy the request over RPC.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5210
    /// Applies the stash entry at `index` (or the latest entry when `None`)
    /// without removing it from the stash. Remote repositories proxy the
    /// request over RPC.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5244
5245 pub fn stash_drop(
5246 &mut self,
5247 index: Option<usize>,
5248 cx: &mut Context<Self>,
5249 ) -> oneshot::Receiver<anyhow::Result<()>> {
5250 let id = self.id;
5251 let updates_tx = self
5252 .git_store()
5253 .and_then(|git_store| match &git_store.read(cx).state {
5254 GitStoreState::Local { downstream, .. } => downstream
5255 .as_ref()
5256 .map(|downstream| downstream.updates_tx.clone()),
5257 _ => None,
5258 });
5259 let this = cx.weak_entity();
5260 self.send_job(None, move |git_repo, mut cx| async move {
5261 match git_repo {
5262 RepositoryState::Local(LocalRepositoryState {
5263 backend,
5264 environment,
5265 ..
5266 }) => {
5267 // TODO would be nice to not have to do this manually
5268 let result = backend.stash_drop(index, environment).await;
5269 if result.is_ok()
5270 && let Ok(stash_entries) = backend.stash_entries().await
5271 {
5272 let snapshot = this.update(&mut cx, |this, cx| {
5273 this.snapshot.stash_entries = stash_entries;
5274 cx.emit(RepositoryEvent::StashEntriesChanged);
5275 this.snapshot.clone()
5276 })?;
5277 if let Some(updates_tx) = updates_tx {
5278 updates_tx
5279 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5280 .ok();
5281 }
5282 }
5283
5284 result
5285 }
5286 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5287 client
5288 .request(proto::StashDrop {
5289 project_id: project_id.0,
5290 repository_id: id.to_proto(),
5291 stash_index: index.map(|i| i as u64),
5292 })
5293 .await
5294 .context("sending stash pop request")?;
5295 Ok(())
5296 }
5297 }
5298 })
5299 }
5300
    /// Runs the given git hook, reporting the job's status as
    /// `git hook <name>`. Remote repositories proxy the request over RPC.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5327
    /// Creates a commit with `message`, optionally overriding the author
    /// name/email, honoring `options` (amend, signoff).
    ///
    /// The pre-commit hook is dispatched first and the commit job awaits its
    /// success before committing. For remote repositories the askpass
    /// delegate is registered under a fresh id for the duration of the
    /// request so the host can relay credential prompts back to this client.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the pre-commit hook failed or was cancelled.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate even if the request errors.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5382
    /// Fetches from a remote, returning the command's stdout/stderr.
    ///
    /// Locally the backend runs `git fetch` with the given options; for remote
    /// repositories the request is forwarded to the host with a registered
    /// askpass delegate so credential prompts can be relayed back.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate on every exit path.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5424
5425 pub fn push(
5426 &mut self,
5427 branch: SharedString,
5428 remote_branch: SharedString,
5429 remote: SharedString,
5430 options: Option<PushOptions>,
5431 askpass: AskPassDelegate,
5432 cx: &mut Context<Self>,
5433 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5434 let askpass_delegates = self.askpass_delegates.clone();
5435 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5436 let id = self.id;
5437
5438 let args = options
5439 .map(|option| match option {
5440 PushOptions::SetUpstream => " --set-upstream",
5441 PushOptions::Force => " --force-with-lease",
5442 })
5443 .unwrap_or("");
5444
5445 let updates_tx = self
5446 .git_store()
5447 .and_then(|git_store| match &git_store.read(cx).state {
5448 GitStoreState::Local { downstream, .. } => downstream
5449 .as_ref()
5450 .map(|downstream| downstream.updates_tx.clone()),
5451 _ => None,
5452 });
5453
5454 let this = cx.weak_entity();
5455 self.send_job(
5456 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5457 move |git_repo, mut cx| async move {
5458 match git_repo {
5459 RepositoryState::Local(LocalRepositoryState {
5460 backend,
5461 environment,
5462 ..
5463 }) => {
5464 let result = backend
5465 .push(
5466 branch.to_string(),
5467 remote_branch.to_string(),
5468 remote.to_string(),
5469 options,
5470 askpass,
5471 environment.clone(),
5472 cx.clone(),
5473 )
5474 .await;
5475 // TODO would be nice to not have to do this manually
5476 if result.is_ok() {
5477 let branches = backend.branches().await?;
5478 let branch = branches.into_iter().find(|branch| branch.is_head);
5479 log::info!("head branch after scan is {branch:?}");
5480 let snapshot = this.update(&mut cx, |this, cx| {
5481 this.snapshot.branch = branch;
5482 cx.emit(RepositoryEvent::BranchChanged);
5483 this.snapshot.clone()
5484 })?;
5485 if let Some(updates_tx) = updates_tx {
5486 updates_tx
5487 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5488 .ok();
5489 }
5490 }
5491 result
5492 }
5493 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5494 askpass_delegates.lock().insert(askpass_id, askpass);
5495 let _defer = util::defer(|| {
5496 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5497 debug_assert!(askpass_delegate.is_some());
5498 });
5499 let response = client
5500 .request(proto::Push {
5501 project_id: project_id.0,
5502 repository_id: id.to_proto(),
5503 askpass_id,
5504 branch_name: branch.to_string(),
5505 remote_branch_name: remote_branch.to_string(),
5506 remote_name: remote.to_string(),
5507 options: options.map(|options| match options {
5508 PushOptions::Force => proto::push::PushOptions::Force,
5509 PushOptions::SetUpstream => {
5510 proto::push::PushOptions::SetUpstream
5511 }
5512 }
5513 as i32),
5514 })
5515 .await?;
5516
5517 Ok(RemoteCommandOutput {
5518 stdout: response.stdout,
5519 stderr: response.stderr,
5520 })
5521 }
5522 }
5523 },
5524 )
5525 }
5526
    /// Pulls from `remote` (optionally a specific `branch`), returning the
    /// command's stdout/stderr.
    ///
    /// When `rebase` is true, `--rebase` is used instead of a merge. For remote
    /// repositories an askpass delegate is registered so credential prompts can
    /// be relayed back to this client.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build the human-readable status, e.g. "git pull --rebase origin main".
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate on every exit path.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5591
    /// Writes `content` into the git index for `path` (`None` removes the
    /// entry from the index).
    ///
    /// The job is keyed on `GitJobKey::WriteIndex(path)` so that redundant
    /// writes to the same path coalesce: if a newer write for the same path is
    /// already queued, an older one is skipped. After the write completes,
    /// `hunk_staging_operation_count` (when provided) is recorded on the
    /// buffer's diff state so later index refreshes can tell whether they are
    /// stale relative to in-flight hunk staging operations.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit from the working-tree file;
                        // a missing file or a metadata error is treated as
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            // Silently bail (via `?` on Options) if the buffer or its
                            // diff state no longer exists; only a dropped git store
                            // is reported as an error.
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5670
5671 pub fn create_remote(
5672 &mut self,
5673 remote_name: String,
5674 remote_url: String,
5675 ) -> oneshot::Receiver<Result<()>> {
5676 let id = self.id;
5677 self.send_job(
5678 Some(format!("git remote add {remote_name} {remote_url}").into()),
5679 move |repo, _cx| async move {
5680 match repo {
5681 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5682 backend.create_remote(remote_name, remote_url).await
5683 }
5684 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5685 client
5686 .request(proto::GitCreateRemote {
5687 project_id: project_id.0,
5688 repository_id: id.to_proto(),
5689 remote_name,
5690 remote_url,
5691 })
5692 .await?;
5693
5694 Ok(())
5695 }
5696 }
5697 },
5698 )
5699 }
5700
5701 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5702 let id = self.id;
5703 self.send_job(
5704 Some(format!("git remove remote {remote_name}").into()),
5705 move |repo, _cx| async move {
5706 match repo {
5707 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5708 backend.remove_remote(remote_name).await
5709 }
5710 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5711 client
5712 .request(proto::GitRemoveRemote {
5713 project_id: project_id.0,
5714 repository_id: id.to_proto(),
5715 remote_name,
5716 })
5717 .await?;
5718
5719 Ok(())
5720 }
5721 }
5722 },
5723 )
5724 }
5725
    /// Resolves the remote(s) relevant for `branch_name`.
    ///
    /// When a branch name is given, first tries that branch's configured push
    /// remote (`is_push`) or upstream remote; if no branch-specific remote is
    /// configured (or no branch name was given), falls back to all remotes.
    pub fn get_remotes(
        &mut self,
        branch_name: Option<String>,
        is_push: bool,
    ) -> oneshot::Receiver<Result<Vec<Remote>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let remote = if let Some(branch_name) = branch_name {
                        if is_push {
                            backend.get_push_remote(branch_name).await?
                        } else {
                            backend.get_branch_remote(branch_name).await?
                        }
                    } else {
                        None
                    };

                    match remote {
                        Some(remote) => Ok(vec![remote]),
                        None => backend.get_all_remotes().await,
                    }
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // The host applies the same fallback logic server-side.
                    let response = client
                        .request(proto::GetRemotes {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                            is_push,
                        })
                        .await?;

                    let remotes = response
                        .remotes
                        .into_iter()
                        .map(|remotes| Remote {
                            name: remotes.name.into(),
                        })
                        .collect();

                    Ok(remotes)
                }
            }
        })
    }
5773
    /// Lists all branches of the repository.
    ///
    /// Local repositories query the backend; remote ones fetch the list via a
    /// `GitGetBranches` request and convert each proto branch back into a
    /// [`Branch`].
    pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.branches().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetBranches {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|branch| proto_to_branch(&branch))
                        .collect();

                    Ok(branches)
                }
            }
        })
    }
5800
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree's working directory differs from the original
        // repository's checkout path; for the main checkout they're equal.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5812
5813 pub fn path_for_new_linked_worktree(
5814 &self,
5815 branch_name: &str,
5816 worktree_directory_setting: &str,
5817 ) -> Result<PathBuf> {
5818 let original_repo = self.original_repo_abs_path.clone();
5819 let project_name = original_repo
5820 .file_name()
5821 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5822 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5823 Ok(directory.join(branch_name).join(project_name))
5824 }
5825
    /// Lists the git worktrees attached to this repository.
    ///
    /// Local repositories query the backend; remote ones fetch the list via a
    /// `GitGetWorktrees` request and convert each proto entry back into a
    /// [`GitWorktree`].
    pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.worktrees().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetWorktrees {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let worktrees = response
                        .worktrees
                        .into_iter()
                        .map(|worktree| proto_to_worktree(&worktree))
                        .collect();

                    Ok(worktrees)
                }
            }
        })
    }
5852
    /// Creates a new linked worktree at `path` for a new branch `branch_name`,
    /// optionally starting from `commit` instead of the current HEAD.
    pub fn create_worktree(
        &mut self,
        branch_name: String,
        path: PathBuf,
        commit: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree add: {}", branch_name).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.create_worktree(branch_name, path, commit).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitCreateWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                name: branch_name,
                                // Proto carries paths as strings; lossy is acceptable
                                // for display/transport of the target directory.
                                directory: path.to_string_lossy().to_string(),
                                commit,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5884
    /// Removes the linked worktree at `path`; `force` removes it even if it
    /// has local modifications (like `git worktree remove --force`).
    pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree remove: {}", path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.remove_worktree(path, force).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRemoveWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_string_lossy().to_string(),
                                force,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5910
    /// Moves the linked worktree at `old_path` to `new_path`
    /// (like `git worktree move`).
    pub fn rename_worktree(
        &mut self,
        old_path: PathBuf,
        new_path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree move: {}", old_path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_worktree(old_path, new_path).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                old_path: old_path.to_string_lossy().to_string(),
                                new_path: new_path.to_string_lossy().to_string(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5940
    /// Resolves the repository's default branch (e.g. "main"), or `None` if it
    /// cannot be determined.
    ///
    /// When `include_remote_name` is true, the returned name is qualified with
    /// the remote (e.g. "origin/main").
    pub fn default_branch(
        &mut self,
        include_remote_name: bool,
    ) -> oneshot::Receiver<Result<Option<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.default_branch(include_remote_name).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GetDefaultBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    anyhow::Ok(response.branch.map(SharedString::from))
                }
            }
        })
    }
5964
    /// Computes a tree-level diff (per-file statuses, no hunk content) for the
    /// given `diff_type`.
    ///
    /// For remote repositories the proto response is converted back into a
    /// [`TreeDiff`]; entries with missing or unparsable oids/paths are dropped
    /// (logged via `log_err`) rather than failing the whole diff.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                // Modified/Deleted need the old blob oid so callers
                                // can load the previous content.
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6024
    /// Produces a textual (patch-format) diff for the given `diff_type`.
    ///
    /// `MergeBase` diffs additionally carry the base ref to diff against; the
    /// other variants diff HEAD against the index or the working tree.
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Map the diff type onto the proto enum, carrying the base
                    // ref only for merge-base diffs.
                    let (proto_diff_type, merge_base_ref) = match &diff_type {
                        DiffType::HeadToIndex => {
                            (proto::git_diff::DiffType::HeadToIndex.into(), None)
                        }
                        DiffType::HeadToWorktree => {
                            (proto::git_diff::DiffType::HeadToWorktree.into(), None)
                        }
                        DiffType::MergeBase { base_ref } => (
                            proto::git_diff::DiffType::MergeBase.into(),
                            Some(base_ref.to_string()),
                        ),
                    };
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            diff_type: proto_diff_type,
                            merge_base_ref,
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
6059
    /// Creates and switches to a new branch `branch_name`, optionally based on
    /// `base_branch` (like `git switch -c <name> [<base>]`).
    ///
    /// NOTE(review): in the remote path, `base_branch` is not forwarded in the
    /// `GitCreateBranch` request, so remote repositories always branch from the
    /// current HEAD — confirm whether the proto message supports a base ref.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6090
6091 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6092 let id = self.id;
6093 self.send_job(
6094 Some(format!("git switch {branch_name}").into()),
6095 move |repo, _cx| async move {
6096 match repo {
6097 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6098 backend.change_branch(branch_name).await
6099 }
6100 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6101 client
6102 .request(proto::GitChangeBranch {
6103 project_id: project_id.0,
6104 repository_id: id.to_proto(),
6105 branch_name,
6106 })
6107 .await?;
6108
6109 Ok(())
6110 }
6111 }
6112 },
6113 )
6114 }
6115
    /// Deletes the branch `branch_name`; `is_remote` deletes a remote-tracking
    /// branch (`git branch -dr`) instead of a local one (`git branch -d`).
    pub fn delete_branch(
        &mut self,
        is_remote: bool,
        branch_name: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(
                format!(
                    "git branch {} {}",
                    if is_remote { "-dr" } else { "-d" },
                    branch_name
                )
                .into(),
            ),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(state) => {
                        state.backend.delete_branch(is_remote, branch_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitDeleteBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                is_remote,
                                branch_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6152
    /// Renames branch `branch` to `new_name` (like `git branch -m`).
    pub fn rename_branch(
        &mut self,
        branch: String,
        new_name: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git branch -m {branch} {new_name}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_branch(branch, new_name).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameBranch {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                branch,
                                new_name,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6182
    /// Returns the names of branches/remotes the current commit has already
    /// been pushed to.
    pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.check_for_pushed_commit().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::CheckForPushedCommits {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response.pushed_to.into_iter().map(Into::into).collect();

                    Ok(branches)
                }
            }
        })
    }
6205
6206 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6207 self.send_job(None, |repo, _cx| async move {
6208 match repo {
6209 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6210 backend.checkpoint().await
6211 }
6212 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6213 }
6214 })
6215 }
6216
    /// Restores the repository to a previously-created checkpoint.
    ///
    /// Only supported for local repositories; remote repositories currently
    /// fail with "not implemented yet".
    pub fn restore_checkpoint(
        &mut self,
        checkpoint: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<()>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.restore_checkpoint(checkpoint).await
                }
                RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
            }
        })
    }
6230
    /// Applies a `proto::UpdateRepository` received from the upstream (host)
    /// project to this repository's snapshot, emitting a change event for each
    /// part of the snapshot that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch / head commit: emit BranchChanged only when either differs.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries: unparsable proto entries are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Statuses arrive as a delta: removed paths plus inserted/updated
        // entries, applied as a single batched edit to the status sum-tree.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only bump the scan id once the final message of a batch arrives.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6314
    /// Compares two checkpoints, resolving to whether they are equivalent.
    ///
    /// Only supported for local repositories; remote repositories currently
    /// fail with "not implemented yet".
    pub fn compare_checkpoints(
        &mut self,
        left: GitRepositoryCheckpoint,
        right: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<bool>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.compare_checkpoints(left, right).await
                }
                RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
            }
        })
    }
6329
    /// Produces a textual diff between two checkpoints.
    ///
    /// Only supported for local repositories; remote repositories currently
    /// fail with "not implemented yet".
    pub fn diff_checkpoints(
        &mut self,
        base_checkpoint: GitRepositoryCheckpoint,
        target_checkpoint: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<String>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend
                        .diff_checkpoints(base_checkpoint, target_checkpoint)
                        .await
                }
                RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
            }
        })
    }
6346
    /// Drops all pending git operations that are no longer running, keeping
    /// only in-flight ones, and emits `PendingOpsChanged` if anything changed.
    ///
    /// NOTE(review): the emitted event carries the *previous* `pending_ops`
    /// snapshot (cloned before `self.pending_ops` is replaced below) — confirm
    /// whether subscribers expect the old or the updated set.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the sum-tree keeping only entries that still have at least
        // one running op.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6372
    /// Schedules a full git status rescan of this (local) repository.
    ///
    /// The job is keyed on `GitJobKey::ReloadGitState`, so a scan already
    /// queued behind this one causes this one to be skipped (only the newest
    /// scan runs). The resulting snapshot is forwarded downstream via
    /// `updates_tx` when this project is being shared.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // Repository entity dropped: nothing to scan.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // NOTE(review): the result of this `update` call appears to be
                // discarded — confirm a failure here is safe to ignore.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6404
    /// Spawns the worker task that executes git jobs for a local repository,
    /// returning the sender used to enqueue jobs.
    ///
    /// The worker first awaits the (shared) repository state, registers any
    /// additional git hosting providers, then drains jobs serially. Keyed jobs
    /// are coalesced: if a job's key also appears later in the queue, the
    /// earlier job is skipped so only the newest one for that key runs.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Pull everything currently available so keyed-job coalescing
                // below can see the whole backlog.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job if a newer job with the same key is queued.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue is empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed: all senders dropped, shut down the worker.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6450
    /// Spawns the worker task that executes git jobs for a remote repository,
    /// returning the sender used to enqueue jobs.
    ///
    /// Same serial job loop (and keyed-job coalescing) as
    /// `spawn_local_git_worker`, but the state is available immediately and no
    /// hosting providers are registered.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Pull everything currently available so keyed-job coalescing
                // below can see the whole backlog.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job if a newer job with the same key is queued.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue is empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed: all senders dropped, shut down the worker.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6486
    /// Loads the staged (index) text for `repo_path`, or `None` if the path is
    /// not present in the index.
    ///
    /// Remote repositories fetch it via `OpenUnstagedDiff` for `buffer_id`.
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        // Flatten the job receiver (outer `?`: job canceled) into a Task.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6511
    /// Loads both the HEAD and index versions of `repo_path`'s file, used to
    /// seed the bases of an uncommitted diff.
    ///
    /// Locally, both texts are read from the git backend and collapsed into
    /// `SetBoth` when identical; remotely, the host performs the same
    /// comparison and reports which case applies via the response `mode`.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // When index and HEAD agree, a single shared base avoids
                    // carrying two identical texts downstream.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6557
6558 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6559 let repository_id = self.snapshot.id;
6560 let rx = self.send_job(None, move |state, _| async move {
6561 match state {
6562 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6563 backend.load_blob_content(oid).await
6564 }
6565 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6566 let response = client
6567 .request(proto::GetBlobContent {
6568 project_id: project_id.to_proto(),
6569 repository_id: repository_id.0,
6570 oid: oid.to_string(),
6571 })
6572 .await?;
6573 Ok(response.content)
6574 }
6575 }
6576 });
6577 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6578 }
6579
    /// Records that `paths` may have changed on disk and schedules a keyed
    /// "refresh statuses" job to recompute their git status and diff stats.
    ///
    /// Calls coalesce: paths accumulate in `paths_needing_status_update`, and
    /// because the job uses `GitJobKey::RefreshStatuses`, an older queued
    /// refresh is skipped by the worker when a newer one is already waiting.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Snapshot current state and take ownership of every batch of
                // accumulated paths; later calls start a fresh accumulation.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                // Diff stats need a HEAD commit to diff against.
                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten and dedupe all batched paths.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Paths git reported a status for: emit an insert edit
                        // only when status or diff stat actually differs from
                        // the previous snapshot.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Remaining paths had no reported status: drop their
                        // stale entries from the snapshot, if present.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    // Forward the fresh snapshot to downstream (collab)
                    // clients, if any.
                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6692
    /// Returns info (command and start time) for one currently running git
    /// job, if any.
    ///
    /// NOTE(review): `active_jobs` is a map, so when several jobs are running
    /// the one returned here is arbitrary — confirm callers only need "some"
    /// running job (e.g. for a busy indicator), not a specific one.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }
6697
    /// Enqueues a no-op git job and returns a receiver that resolves once it
    /// runs — i.e. after every job enqueued before it has been processed,
    /// since the worker services jobs one at a time in order.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
6701
    /// Runs `f` while tracking a pending op for each of `paths`, so per-path
    /// in-flight git state can be observed (e.g. by the UI).
    ///
    /// Each path gets a new `PendingOp` marked `Running` before `f` starts;
    /// when `f` completes, the ops are marked `Finished`, `Skipped` (when the
    /// job was canceled — not surfaced as an error), or `Error`.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // Translate the job result into a terminal pending-op status.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                // Re-read each path's ops (they may have changed while the job
                // ran) and update just the op ids we created above.
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6741
6742 fn new_pending_ops_for_paths(
6743 &mut self,
6744 paths: Vec<RepoPath>,
6745 git_status: pending_op::GitStatus,
6746 ) -> Vec<(PendingOpId, RepoPath)> {
6747 let mut edits = Vec::with_capacity(paths.len());
6748 let mut ids = Vec::with_capacity(paths.len());
6749 for path in paths {
6750 let mut ops = self
6751 .pending_ops
6752 .get(&PathKey(path.as_ref().clone()), ())
6753 .cloned()
6754 .unwrap_or_else(|| PendingOps::new(&path));
6755 let id = ops.max_id() + 1;
6756 ops.ops.push(PendingOp {
6757 id,
6758 git_status,
6759 job_status: pending_op::JobStatus::Running,
6760 });
6761 edits.push(sum_tree::Edit::Insert(ops));
6762 ids.push((id, path));
6763 }
6764 self.pending_ops.edit(edits, ());
6765 ids
6766 }
6767 pub fn default_remote_url(&self) -> Option<String> {
6768 self.remote_upstream_url
6769 .clone()
6770 .or(self.remote_origin_url.clone())
6771 }
6772}
6773
6774/// If `path` is a git linked worktree checkout, resolves it to the main
6775/// repository's working directory path. Returns `None` if `path` is a normal
6776/// repository, not a git repo, or if resolution fails.
6777///
6778/// Resolution works by:
6779/// 1. Reading the `.git` file to get the `gitdir:` pointer
6780/// 2. Following that to the worktree-specific git directory
6781/// 3. Reading the `commondir` file to find the shared `.git` directory
6782/// 4. Deriving the main repo's working directory from the common dir
6783pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
6784 let dot_git = path.join(".git");
6785 let metadata = fs.metadata(&dot_git).await.ok()??;
6786 if metadata.is_dir {
6787 return None; // Normal repo, not a linked worktree
6788 }
6789 // It's a .git file — parse the gitdir: pointer
6790 let content = fs.load(&dot_git).await.ok()?;
6791 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
6792 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
6793 // Read commondir to find the main .git directory
6794 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
6795 let common_dir = fs
6796 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
6797 .await
6798 .ok()?;
6799 Some(git::repository::original_repo_path_from_common_dir(
6800 &common_dir,
6801 ))
6802}
6803
6804/// Validates that the resolved worktree directory is acceptable:
6805/// - The setting must not be an absolute path.
6806/// - The resolved path must be either a subdirectory of the working
6807/// directory or a subdirectory of its parent (i.e., a sibling).
6808///
6809/// Returns `Ok(resolved_path)` or an error with a user-facing message.
6810pub fn worktrees_directory_for_repo(
6811 original_repo_abs_path: &Path,
6812 worktree_directory_setting: &str,
6813) -> Result<PathBuf> {
6814 // Check the original setting before trimming, since a path like "///"
6815 // is absolute but becomes "" after stripping trailing separators.
6816 // Also check for leading `/` or `\` explicitly, because on Windows
6817 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
6818 // would slip through even though it's clearly not a relative path.
6819 if Path::new(worktree_directory_setting).is_absolute()
6820 || worktree_directory_setting.starts_with('/')
6821 || worktree_directory_setting.starts_with('\\')
6822 {
6823 anyhow::bail!(
6824 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
6825 );
6826 }
6827
6828 if worktree_directory_setting.is_empty() {
6829 anyhow::bail!("git.worktree_directory must not be empty");
6830 }
6831
6832 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
6833 if trimmed == ".." {
6834 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
6835 }
6836
6837 let joined = original_repo_abs_path.join(trimmed);
6838 let resolved = util::normalize_path(&joined);
6839 let resolved = if resolved.starts_with(original_repo_abs_path) {
6840 resolved
6841 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
6842 resolved.join(repo_dir_name)
6843 } else {
6844 resolved
6845 };
6846
6847 let parent = original_repo_abs_path
6848 .parent()
6849 .unwrap_or(original_repo_abs_path);
6850
6851 if !resolved.starts_with(parent) {
6852 anyhow::bail!(
6853 "git.worktree_directory resolved to {resolved:?}, which is outside \
6854 the project root and its parent directory. It must resolve to a \
6855 subdirectory of {original_repo_abs_path:?} or a sibling of it."
6856 );
6857 }
6858
6859 Ok(resolved)
6860}
6861
6862/// Returns a short name for a linked worktree suitable for UI display
6863///
6864/// Uses the main worktree path to come up with a short name that disambiguates
6865/// the linked worktree from the main worktree.
6866pub fn linked_worktree_short_name(
6867 main_worktree_path: &Path,
6868 linked_worktree_path: &Path,
6869) -> Option<SharedString> {
6870 if main_worktree_path == linked_worktree_path {
6871 return None;
6872 }
6873
6874 let project_name = main_worktree_path.file_name()?.to_str()?;
6875 let directory_name = linked_worktree_path.file_name()?.to_str()?;
6876 let name = if directory_name != project_name {
6877 directory_name.to_string()
6878 } else {
6879 linked_worktree_path
6880 .parent()?
6881 .file_name()?
6882 .to_str()?
6883 .to_string()
6884 };
6885 Some(name.into())
6886}
6887
/// Builds a permalink to the upstream repository for a file inside the Rust
/// registry source cache.
///
/// Published crates ship a `.cargo_vcs_info.json` recording the git commit
/// and in-repo subdirectory they were packaged from; combined with the
/// `package.repository` field of `Cargo.toml`, this pins an exact permalink
/// for `path` with `selection` as the highlighted line range.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal deserialization targets for the two files we read; all other
    // fields are ignored.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to find the crate root — the ancestor directory
    // containing `.cargo_vcs_info.json`.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file's crate-relative path onto its location inside the
    // upstream repository.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
6938
6939fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6940 let Some(blame) = blame else {
6941 return proto::BlameBufferResponse {
6942 blame_response: None,
6943 };
6944 };
6945
6946 let entries = blame
6947 .entries
6948 .into_iter()
6949 .map(|entry| proto::BlameEntry {
6950 sha: entry.sha.as_bytes().into(),
6951 start_line: entry.range.start,
6952 end_line: entry.range.end,
6953 original_line_number: entry.original_line_number,
6954 author: entry.author,
6955 author_mail: entry.author_mail,
6956 author_time: entry.author_time,
6957 author_tz: entry.author_tz,
6958 committer: entry.committer_name,
6959 committer_mail: entry.committer_email,
6960 committer_time: entry.committer_time,
6961 committer_tz: entry.committer_tz,
6962 summary: entry.summary,
6963 previous: entry.previous,
6964 filename: entry.filename,
6965 })
6966 .collect::<Vec<_>>();
6967
6968 let messages = blame
6969 .messages
6970 .into_iter()
6971 .map(|(oid, message)| proto::CommitMessage {
6972 oid: oid.as_bytes().into(),
6973 message,
6974 })
6975 .collect::<Vec<_>>();
6976
6977 proto::BlameBufferResponse {
6978 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6979 }
6980}
6981
6982fn deserialize_blame_buffer_response(
6983 response: proto::BlameBufferResponse,
6984) -> Option<git::blame::Blame> {
6985 let response = response.blame_response?;
6986 let entries = response
6987 .entries
6988 .into_iter()
6989 .filter_map(|entry| {
6990 Some(git::blame::BlameEntry {
6991 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6992 range: entry.start_line..entry.end_line,
6993 original_line_number: entry.original_line_number,
6994 committer_name: entry.committer,
6995 committer_time: entry.committer_time,
6996 committer_tz: entry.committer_tz,
6997 committer_email: entry.committer_mail,
6998 author: entry.author,
6999 author_mail: entry.author_mail,
7000 author_time: entry.author_time,
7001 author_tz: entry.author_tz,
7002 summary: entry.summary,
7003 previous: entry.previous,
7004 filename: entry.filename,
7005 })
7006 })
7007 .collect::<Vec<_>>();
7008
7009 let messages = response
7010 .messages
7011 .into_iter()
7012 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7013 .collect::<HashMap<_, _>>();
7014
7015 Some(Blame { entries, messages })
7016}
7017
7018fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7019 proto::Branch {
7020 is_head: branch.is_head,
7021 ref_name: branch.ref_name.to_string(),
7022 unix_timestamp: branch
7023 .most_recent_commit
7024 .as_ref()
7025 .map(|commit| commit.commit_timestamp as u64),
7026 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7027 ref_name: upstream.ref_name.to_string(),
7028 tracking: upstream
7029 .tracking
7030 .status()
7031 .map(|upstream| proto::UpstreamTracking {
7032 ahead: upstream.ahead as u64,
7033 behind: upstream.behind as u64,
7034 }),
7035 }),
7036 most_recent_commit: branch
7037 .most_recent_commit
7038 .as_ref()
7039 .map(|commit| proto::CommitSummary {
7040 sha: commit.sha.to_string(),
7041 subject: commit.subject.to_string(),
7042 commit_timestamp: commit.commit_timestamp,
7043 author_name: commit.author_name.to_string(),
7044 }),
7045 }
7046}
7047
7048fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7049 proto::Worktree {
7050 path: worktree.path.to_string_lossy().to_string(),
7051 ref_name: worktree
7052 .ref_name
7053 .as_ref()
7054 .map(|s| s.to_string())
7055 .unwrap_or_default(),
7056 sha: worktree.sha.to_string(),
7057 }
7058}
7059
7060fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7061 git::repository::Worktree {
7062 path: PathBuf::from(proto.path.clone()),
7063 ref_name: Some(SharedString::from(&proto.ref_name)),
7064 sha: proto.sha.clone().into(),
7065 }
7066}
7067
7068fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7069 git::repository::Branch {
7070 is_head: proto.is_head,
7071 ref_name: proto.ref_name.clone().into(),
7072 upstream: proto
7073 .upstream
7074 .as_ref()
7075 .map(|upstream| git::repository::Upstream {
7076 ref_name: upstream.ref_name.to_string().into(),
7077 tracking: upstream
7078 .tracking
7079 .as_ref()
7080 .map(|tracking| {
7081 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7082 ahead: tracking.ahead as u32,
7083 behind: tracking.behind as u32,
7084 })
7085 })
7086 .unwrap_or(git::repository::UpstreamTracking::Gone),
7087 }),
7088 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7089 git::repository::CommitSummary {
7090 sha: commit.sha.to_string().into(),
7091 subject: commit.subject.to_string().into(),
7092 commit_timestamp: commit.commit_timestamp,
7093 author_name: commit.author_name.to_string().into(),
7094 has_parent: true,
7095 }
7096 }),
7097 }
7098}
7099
7100fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7101 proto::GitCommitDetails {
7102 sha: commit.sha.to_string(),
7103 message: commit.message.to_string(),
7104 commit_timestamp: commit.commit_timestamp,
7105 author_email: commit.author_email.to_string(),
7106 author_name: commit.author_name.to_string(),
7107 }
7108}
7109
7110fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7111 CommitDetails {
7112 sha: proto.sha.clone().into(),
7113 message: proto.message.clone().into(),
7114 commit_timestamp: proto.commit_timestamp,
7115 author_email: proto.author_email.clone().into(),
7116 author_name: proto.author_name.clone().into(),
7117 }
7118}
7119
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Pending incremental status refreshes are superseded by this full
    // recomputation, so drop them.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD to full commit details; no HEAD (e.g. unborn branch)
    // yields `None`.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Keep only linked worktrees; the main working directory is tracked
    // separately via `work_directory_abs_path`.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First foreground update: publish branch/head/remote/worktree state (and
    // its events) before the slower status computation below.
    let snapshot = this.update(cx, |this, cx| {
        let branch_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if branch_changed {
            cx.emit(RepositoryEvent::BranchChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                // Diff stats require a HEAD commit to diff against; without
                // one, substitute an empty result.
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Join statuses with their diff stats while collecting conflicted paths
    // for the merge-state update below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Second foreground update: statuses, stash, and merge state, with their
    // change events.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7282
/// Decodes a protobuf file status into a [`FileStatus`].
///
/// When the structured `status` variant is present it takes precedence;
/// otherwise the scalar `simple_status` code is decoded — presumably for
/// compatibility with peers that only send the scalar form (confirm against
/// the protocol definition).
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Scalar path: map the simple code onto the closest structured status.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both heads with one mapping; errors surface via `?` on
            // the unpacked results below.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Index and worktree columns share a single status-code mapping.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7368
7369fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7370 use proto::git_file_status::{Tracked, Unmerged, Variant};
7371
7372 let variant = match status {
7373 FileStatus::Untracked => Variant::Untracked(Default::default()),
7374 FileStatus::Ignored => Variant::Ignored(Default::default()),
7375 FileStatus::Unmerged(UnmergedStatus {
7376 first_head,
7377 second_head,
7378 }) => Variant::Unmerged(Unmerged {
7379 first_head: unmerged_status_to_proto(first_head),
7380 second_head: unmerged_status_to_proto(second_head),
7381 }),
7382 FileStatus::Tracked(TrackedStatus {
7383 index_status,
7384 worktree_status,
7385 }) => Variant::Tracked(Tracked {
7386 index_status: tracked_status_to_proto(index_status),
7387 worktree_status: tracked_status_to_proto(worktree_status),
7388 }),
7389 };
7390 proto::GitFileStatus {
7391 variant: Some(variant),
7392 }
7393}
7394
7395fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7396 match code {
7397 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7398 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7399 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7400 }
7401}
7402
7403fn tracked_status_to_proto(code: StatusCode) -> i32 {
7404 match code {
7405 StatusCode::Added => proto::GitStatus::Added as _,
7406 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7407 StatusCode::Modified => proto::GitStatus::Modified as _,
7408 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7409 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7410 StatusCode::Copied => proto::GitStatus::Copied as _,
7411 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7412 }
7413}