1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store of git state for a project: tracks every discovered
/// repository, the per-buffer diff state, and (when collaborating) the
/// upstream/downstream replication machinery.
pub struct GitStore {
    // Local vs. remote mode, plus any downstream peer we forward to.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Which worktrees contribute paths to each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    active_repo_id: Option<RepositoryId>,
    // In-flight diff loads, shared so concurrent requests deduplicate.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Strong handles to diffs shared with each remote peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Strong handles to diffs that have been shared with a remote peer,
/// presumably held to keep the entities alive while shared — confirm against
/// the share/unshare handlers.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}

/// Per-buffer git state: the diffs derived from this buffer, conflict-marker
/// tracking, and the cached head/index texts those diffs are based on.
struct BufferGitState {
    // Weak handles: the diffs are owned by whoever requested them.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against an arbitrary commit; a `None` key is a diff with no base commit.
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders notified when the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in flight.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    // Cached base texts for `oid_diffs`, keyed by commit oid.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts changed, so only the affected diffs are
/// recalculated. `None` payloads clear the corresponding base text.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}

/// Which kind of diff is being loaded for a buffer; used as part of the
/// `loading_diffs` deduplication key.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    Unstaged,
    Uncommitted,
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store operates on a local working copy or proxies to an
/// upstream collaborator, plus any downstream peer updates are forwarded to.
enum GitStoreState {
    Local {
        // Source of ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A unit of work queued for forwarding to a downstream peer.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Connection state for a downstream peer of a local store, including the
/// background task that serializes and sends repository updates.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    // Dropped (cancelling the forwarding loop) when the state is discarded.
    _task: Task<Result<()>>,
}

/// A checkpoint of every repository in the store, keyed by each repository's
/// working-directory absolute path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// The git status of a single path within a repository, plus optional
/// added/deleted line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
/// Allows status entries to live in a [`SumTree`] ordered by repo path, with
/// git status counts rolled up into the summary.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    // Entries are keyed by their repo-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Identifier for a repository tracked by the [`GitStore`]; allocated locally
/// or assigned by the upstream when remote.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: which paths conflict (and with which merge
/// heads), plus the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Cache entry for per-commit graph data: either still being fetched or fully
/// loaded.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}

/// An immutable snapshot of a repository's observable state, cheap to clone
/// and diff for replication to downstream peers.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    // Incremented on each status scan; used to order updates.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    pub linked_worktrees: Arc<[GitWorktree]>,
}

// Monotonic id for jobs queued on a repository.
type JobId = u64;

/// Metadata about a running git job, used to surface progress to the UI.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
306
/// Background worker that resolves detailed commit data for the git graph;
/// requests are submitted by oid over the channel.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the commit-data handler: not yet spawned, running, or shut
/// down.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Initial commit listing for one git-graph view (a `(LogSource, LogOrder)`
/// pair), populated incrementally by `fetch_task`.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    // Reverse index from oid to position in `commit_data`.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of the graph data returned to callers, including whether the
/// fetch is still in progress.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// A single git repository tracked by the store. Wraps the latest
/// [`RepositorySnapshot`] (accessible via `Deref`) together with the job
/// queue, askpass plumbing, and cached graph/commit data.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Jobs are executed serially through this channel.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    // Next job id to allocate.
    job_id: JobId,
    // Askpass delegates for in-flight auth prompts, keyed by prompt id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Lazily resolved backend state (local git backend or remote proxy).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
350
/// Exposes the snapshot's fields directly on `Repository` for read access.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
358
/// Backend state for a repository on the local machine: the filesystem, the
/// opened git backend, and the shell environment of its working directory.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    pub environment: Arc<HashMap<String, String>>,
}
365
impl LocalRepositoryState {
    /// Opens the git repository at `dot_git_abs_path`, first resolving the
    /// directory-specific shell environment so the `git` binary is looked up
    /// on the same `PATH` the user's shell would use.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        // Best-effort: fall back to an empty environment rather than failing
        // to open the repository.
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found on the resolved PATH; otherwise
                    // fall back to whatever `which` finds in the process
                    // environment. `None` lets the backend use its default.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
408
/// Backend state for a repository accessed via an upstream collaborator.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// The resolved backend for a repository: a local git backend or a remote
/// proxy.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events for an in-flight git-graph load.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of known commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
427
/// Events emitted by a [`Repository`] when its observable state changes.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Store-level events, aggregating per-repository changes for UI consumers.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
456
/// A queued unit of git work, executed with the resolved repository backend.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // When set, a newly queued job replaces any queued job with the same key —
    // NOTE(review): presumably for deduplication; confirm at the queue site.
    key: Option<GitJobKey>,
}

/// Deduplication keys for queued git jobs.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
471 pub fn local(
472 worktree_store: &Entity<WorktreeStore>,
473 buffer_store: Entity<BufferStore>,
474 environment: Entity<ProjectEnvironment>,
475 fs: Arc<dyn Fs>,
476 cx: &mut Context<Self>,
477 ) -> Self {
478 Self::new(
479 worktree_store.clone(),
480 buffer_store,
481 GitStoreState::Local {
482 next_repository_id: Arc::new(AtomicU64::new(1)),
483 downstream: None,
484 project_environment: environment,
485 fs,
486 },
487 cx,
488 )
489 }
490
491 pub fn remote(
492 worktree_store: &Entity<WorktreeStore>,
493 buffer_store: Entity<BufferStore>,
494 upstream_client: AnyProtoClient,
495 project_id: u64,
496 cx: &mut Context<Self>,
497 ) -> Self {
498 Self::new(
499 worktree_store.clone(),
500 buffer_store,
501 GitStoreState::Remote {
502 upstream_client,
503 upstream_project_id: project_id,
504 downstream: None,
505 },
506 cx,
507 )
508 }
509
510 fn new(
511 worktree_store: Entity<WorktreeStore>,
512 buffer_store: Entity<BufferStore>,
513 state: GitStoreState,
514 cx: &mut Context<Self>,
515 ) -> Self {
516 let mut _subscriptions = vec![
517 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
518 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
519 ];
520
521 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
522 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
523 }
524
525 GitStore {
526 state,
527 buffer_store,
528 worktree_store,
529 repositories: HashMap::default(),
530 worktree_ids: HashMap::default(),
531 active_repo_id: None,
532 _subscriptions,
533 loading_diffs: HashMap::default(),
534 shared_diffs: HashMap::default(),
535 diffs: HashMap::default(),
536 }
537 }
538
    /// Registers all git-related RPC handlers on `client`, mapping each
    /// protocol message to its `handle_*` method on this entity.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
586
    /// Whether this store operates directly on a local working copy (as
    /// opposed to proxying through an upstream collaborator).
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
590 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
591 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
592 let id = repo.read(cx).id;
593 if self.active_repo_id != Some(id) {
594 self.active_repo_id = Some(id);
595 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
596 }
597 }
598 }
599
    /// Begins sharing this store with a downstream peer: sends an initial
    /// snapshot of every repository and, for local stores, spawns a task that
    /// streams incremental updates to the peer.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Remote mode: send full snapshots directly; send errors are
                // logged and ignored.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Queue current snapshots so the peer receives initial state
                // through the same channel as subsequent updates.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        // Diff against the last snapshot sent for this
                                        // repository, or send a full initial update the
                                        // first time we see it.
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The channel closed or a send failed: tear down the
                        // downstream state.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
680
681 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
682 match &mut self.state {
683 GitStoreState::Local {
684 downstream: downstream_client,
685 ..
686 } => {
687 downstream_client.take();
688 }
689 GitStoreState::Remote {
690 downstream: downstream_client,
691 ..
692 } => {
693 downstream_client.take();
694 }
695 }
696 self.shared_diffs.clear();
697 }
698
    /// Drops the diff entities that were held alive for the given peer.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
702
703 pub fn active_repository(&self) -> Option<Entity<Repository>> {
704 self.active_repo_id
705 .as_ref()
706 .map(|id| self.repositories[id].clone())
707 }
708
    /// Returns the unstaged diff (buffer vs. index) for `buffer`, creating
    /// and caching it on first request. Concurrent callers share one loading
    /// task.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: a live diff already exists; just wait out any in-flight
        // recalculation before handing it back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads via a shared task keyed by
        // (buffer, kind).
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
763
    /// Returns a diff of `buffer` against the blob at `oid` (or with no base
    /// content when `oid` is `None`), creating and caching it on first
    /// request. The unstaged diff is attached as the secondary diff.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff for this oid already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load for this (buffer, oid) pair is already in flight; share it.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository; `None` leaves
                    // the diff with no base text.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Attach the unstaged diff as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and a weak handle to the diff.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
864
    /// Returns the uncommitted diff (buffer vs. HEAD) for `buffer`, creating
    /// and caching it on first request. Concurrent callers share one loading
    /// task.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff already exists; just wait out any in-flight
        // recalculation before handing it back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads via a shared task keyed by
        // (buffer, kind).
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
917
    /// Shared tail of `open_unstaged_diff` and `open_uncommitted_diff`:
    /// creates the [`BufferDiff`], records it in the buffer's git state, and
    /// waits for the first recalculation to finish before resolving.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // On failure, clear the loading entry so a retry is possible, then
        // propagate the error.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off the recalculation with the new base texts, and
                // resolve only once it completes.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
992
993 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
994 let diff_state = self.diffs.get(&buffer_id)?;
995 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
996 }
997
998 pub fn get_uncommitted_diff(
999 &self,
1000 buffer_id: BufferId,
1001 cx: &App,
1002 ) -> Option<Entity<BufferDiff>> {
1003 let diff_state = self.diffs.get(&buffer_id)?;
1004 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1005 }
1006
1007 pub fn get_diff_since_oid(
1008 &self,
1009 buffer_id: BufferId,
1010 oid: Option<git::Oid>,
1011 cx: &App,
1012 ) -> Option<Entity<BufferDiff>> {
1013 let diff_state = self.diffs.get(&buffer_id)?;
1014 diff_state.read(cx).oid_diff(oid)
1015 }
1016
1017 pub fn open_conflict_set(
1018 &mut self,
1019 buffer: Entity<Buffer>,
1020 cx: &mut Context<Self>,
1021 ) -> Entity<ConflictSet> {
1022 log::debug!("open conflict set");
1023 let buffer_id = buffer.read(cx).remote_id();
1024
1025 if let Some(git_state) = self.diffs.get(&buffer_id)
1026 && let Some(conflict_set) = git_state
1027 .read(cx)
1028 .conflict_set
1029 .as_ref()
1030 .and_then(|weak| weak.upgrade())
1031 {
1032 let conflict_set = conflict_set;
1033 let buffer_snapshot = buffer.read(cx).text_snapshot();
1034
1035 git_state.update(cx, |state, cx| {
1036 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1037 });
1038
1039 return conflict_set;
1040 }
1041
1042 let is_unmerged = self
1043 .repository_and_path_for_buffer_id(buffer_id, cx)
1044 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1045 let git_store = cx.weak_entity();
1046 let buffer_git_state = self
1047 .diffs
1048 .entry(buffer_id)
1049 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1050 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1051
1052 self._subscriptions
1053 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1054 cx.emit(GitStoreEvent::ConflictsUpdated);
1055 }));
1056
1057 buffer_git_state.update(cx, |state, cx| {
1058 state.conflict_set = Some(conflict_set.downgrade());
1059 let buffer_snapshot = buffer.read(cx).text_snapshot();
1060 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1061 });
1062
1063 conflict_set
1064 }
1065
1066 pub fn project_path_git_status(
1067 &self,
1068 project_path: &ProjectPath,
1069 cx: &App,
1070 ) -> Option<FileStatus> {
1071 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1072 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1073 }
1074
    /// Captures a checkpoint of every repository in this store.
    ///
    /// Collects one checkpoint future per repository together with its
    /// working-directory path, then awaits them all on the background
    /// executor. Fails if any single repository fails to checkpoint.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // The `?` inside the closure flattens the nested `Result`
                // yielded by the checkpoint future into a single `Result`.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Pair each work directory with its checkpoint; the two
                // vectors were pushed in lock step above, so zipping keeps
                // the association correct.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1095
1096 pub fn restore_checkpoint(
1097 &self,
1098 checkpoint: GitStoreCheckpoint,
1099 cx: &mut App,
1100 ) -> Task<Result<()>> {
1101 let repositories_by_work_dir_abs_path = self
1102 .repositories
1103 .values()
1104 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1105 .collect::<HashMap<_, _>>();
1106
1107 let mut tasks = Vec::new();
1108 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1109 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1110 let restore = repository.update(cx, |repository, _| {
1111 repository.restore_checkpoint(checkpoint)
1112 });
1113 tasks.push(async move { restore.await? });
1114 }
1115 }
1116 cx.background_spawn(async move {
1117 future::try_join_all(tasks).await?;
1118 Ok(())
1119 })
1120 }
1121
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Every entry in `left` must have a counterpart in `right` for the same
    /// working directory; a missing counterpart short-circuits to `false`.
    /// Matched pairs are compared by their repository on background tasks.
    ///
    /// NOTE(review): entries that exist only in `right` are never examined,
    /// so the check is asymmetric in that direction — confirm callers always
    /// capture both checkpoints from the same repository set.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                // Checkpoints whose repository has since disappeared are
                // silently skipped.
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    // `?` flattens the job's nested `Result`.
                    tasks.push(async move { compare.await? });
                }
            } else {
                // `right` lacks this working directory entirely.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1160
    /// Blames a buffer.
    ///
    /// If `version` is given, blames the buffer content as of that version;
    /// otherwise blames the current content. For local repositories the
    /// blame runs directly on the git backend; for remote repositories the
    /// host is asked via RPC (and only that path can yield `Ok(None)`).
    /// Fails if the buffer does not belong to any known git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the text to blame before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle so the blame task doesn't keep the
        // repository entity alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                // Local repository: run the blame on the backend directly.
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                // Remote repository: ask the host to compute the blame.
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1207
1208 pub fn file_history(
1209 &self,
1210 repo: &Entity<Repository>,
1211 path: RepoPath,
1212 cx: &mut App,
1213 ) -> Task<Result<git::repository::FileHistory>> {
1214 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1215
1216 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1217 }
1218
1219 pub fn file_history_paginated(
1220 &self,
1221 repo: &Entity<Repository>,
1222 path: RepoPath,
1223 skip: usize,
1224 limit: Option<usize>,
1225 cx: &mut App,
1226 ) -> Task<Result<git::repository::FileHistory>> {
1227 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1228
1229 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1230 }
1231
    /// Builds a permalink URL to the selected range in the file backing
    /// `buffer`.
    ///
    /// Preferred path: resolve the buffer's git repository, read the remote
    /// URL for the branch's upstream remote (falling back to "origin"),
    /// parse it into a known hosting provider, and have the provider format
    /// a permalink at the current HEAD SHA. When the buffer is not inside a
    /// repository, a special case handles Rust sources from the Cargo
    /// registry cache using crate metadata.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Determine which remote to use: the upstream of the current branch,
        // or "origin" when there is no upstream.
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Local repository: query the backend for the remote URL
                    // and HEAD SHA, then build the permalink in-process.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Remote repository: ask the host to build the permalink.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1316
1317 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1318 match &self.state {
1319 GitStoreState::Local {
1320 downstream: downstream_client,
1321 ..
1322 } => downstream_client
1323 .as_ref()
1324 .map(|state| (state.client.clone(), state.project_id)),
1325 GitStoreState::Remote {
1326 downstream: downstream_client,
1327 ..
1328 } => downstream_client.clone(),
1329 }
1330 }
1331
1332 fn upstream_client(&self) -> Option<AnyProtoClient> {
1333 match &self.state {
1334 GitStoreState::Local { .. } => None,
1335 GitStoreState::Remote {
1336 upstream_client, ..
1337 } => Some(upstream_client.clone()),
1338 }
1339 }
1340
    /// Routes worktree-store events to the git subsystem.
    ///
    /// Only meaningful for local stores (remote stores learn about
    /// repositories through proto updates instead). Handles entry updates
    /// (notify owning repositories about changed paths), git repository
    /// updates (add/update/remove repositories), and worktree removal
    /// (drop repositories no longer backed by any worktree).
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by the repository that owns
                    // them, then notify each repository asynchronously,
                    // forwarding resulting updates downstream when shared.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees don't contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Remove the worktree from every repository's worktree set
                // and collect repositories that are now orphaned.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream collaborators about the removal.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                if is_active_repo_removed {
                    // Fall back to any remaining repository, or none.
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to an update event from one of the store's repositories.
    ///
    /// For every open buffer whose file belongs to the updated repository,
    /// syncs the buffer's conflict set with the repository's unmerged-path
    /// snapshot (reparsing conflict markers when the flag flips), then
    /// re-emits the event at the store level, noting whether it came from
    /// the active repository.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // The conflict set is held weakly; `?` bails out of
                        // the closure if it has been dropped.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1486
    /// Re-broadcasts a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1490
    /// Updates the list of repositories and schedules git scans in response
    /// to a repository-change notification from a worktree.
    ///
    /// Each update either (a) matches an existing repository by old/new
    /// working-directory path — in which case the repository is moved and
    /// rescanned, or detached from this worktree — or (b) describes a newly
    /// discovered repository, for which a local `Repository` entity is
    /// created, subscribed, and scanned. Repositories left with no worktree
    /// are removed at the end and the removal is forwarded downstream.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match an existing repository by either its old or new working
            // directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still exists (possibly moved): associate it
                    // with this worktree and rescan it.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Repository vanished from this worktree; remove it once
                    // no other worktree references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // Newly discovered repository.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Repository trust follows the trust of its worktree, and
                // defaults to untrusted when no trust store is registered.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1603
1604 fn on_trusted_worktrees_event(
1605 &mut self,
1606 _: Entity<TrustedWorktreesStore>,
1607 event: &TrustedWorktreesEvent,
1608 cx: &mut Context<Self>,
1609 ) {
1610 if !matches!(self.state, GitStoreState::Local { .. }) {
1611 return;
1612 }
1613
1614 let (is_trusted, event_paths) = match event {
1615 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1616 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1617 };
1618
1619 for (repo_id, worktree_ids) in &self.worktree_ids {
1620 if worktree_ids
1621 .iter()
1622 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1623 {
1624 if let Some(repo) = self.repositories.get(repo_id) {
1625 let repository_state = repo.read(cx).repository_state.clone();
1626 cx.background_spawn(async move {
1627 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1628 state.backend.set_trusted(is_trusted);
1629 }
1630 })
1631 .detach();
1632 }
1633 }
1634 }
1635 }
1636
    /// Keeps per-buffer git state in sync with buffer lifecycle events.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Re-run language-dependent diff setup whenever the buffer's
                // language changes.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // Drop the diff shared with that particular peer only.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Discard all git state tied to the dropped buffer.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Load the committed text at the new path and swap it
                        // in as the buffer's diff base; failures are only
                        // logged (best effort).
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1709
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all of that
    /// work (for the buffers that have git state) has completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` yields `None` when there is
                    // nothing to wait for; `extend` skips that case.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1735
    /// Handles hunk (un)staging requests coming from a buffer diff.
    ///
    /// Bumps the per-buffer hunk-staging operation counter, then asks the
    /// owning repository to write the new index text. If the index write
    /// fails, the diff's pending hunk state is cleared and an
    /// `IndexWriteError` is emitted on the store.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        // `None` index text means the file is removed from
                        // the index; otherwise write the provided rope.
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Only the inner `Err` (an actual index-write error)
                        // triggers cleanup; a canceled job is ignored.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1775
1776 fn local_worktree_git_repos_changed(
1777 &mut self,
1778 worktree: Entity<Worktree>,
1779 changed_repos: &UpdatedGitRepositoriesSet,
1780 cx: &mut Context<Self>,
1781 ) {
1782 log::debug!("local worktree repos changed");
1783 debug_assert!(worktree.read(cx).is_local());
1784
1785 for repository in self.repositories.values() {
1786 repository.update(cx, |repository, cx| {
1787 let repo_abs_path = &repository.work_directory_abs_path;
1788 if changed_repos.iter().any(|update| {
1789 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1790 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1791 }) {
1792 repository.reload_buffer_diff_bases(cx);
1793 }
1794 });
1795 }
1796 }
1797
    /// All repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1801
    /// Returns the original (main) repository working directory for the given worktree.
    /// For normal checkouts this equals the worktree's own path; for linked
    /// worktrees it points back to the original repo.
    pub fn original_repo_path_for_worktree(
        &self,
        worktree_id: WorktreeId,
        cx: &App,
    ) -> Option<Arc<Path>> {
        // Prefer the active repository when it covers this worktree;
        // otherwise take the first tracked repository that does.
        self.active_repo_id
            .iter()
            .chain(self.worktree_ids.keys())
            .find(|repo_id| {
                self.worktree_ids
                    .get(repo_id)
                    .is_some_and(|ids| ids.contains(&worktree_id))
            })
            .and_then(|repo_id| self.repositories.get(repo_id))
            .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
    }
1821
1822 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1823 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1824 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1825 Some(status.status)
1826 }
1827
1828 pub fn repository_and_path_for_buffer_id(
1829 &self,
1830 buffer_id: BufferId,
1831 cx: &App,
1832 ) -> Option<(Entity<Repository>, RepoPath)> {
1833 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1834 let project_path = buffer.read(cx).project_path(cx)?;
1835 self.repository_and_path_for_project_path(&project_path, cx)
1836 }
1837
    /// Finds the repository containing `path`, along with the path expressed
    /// relative to that repository's working directory.
    ///
    /// When repositories are nested, every candidate's working directory is
    /// a prefix of the absolute path, so taking the maximum working
    /// directory selects the innermost (deepest) repository.
    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }
1852
1853 pub fn git_init(
1854 &self,
1855 path: Arc<Path>,
1856 fallback_branch_name: String,
1857 cx: &App,
1858 ) -> Task<Result<()>> {
1859 match &self.state {
1860 GitStoreState::Local { fs, .. } => {
1861 let fs = fs.clone();
1862 cx.background_executor()
1863 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1864 }
1865 GitStoreState::Remote {
1866 upstream_client,
1867 upstream_project_id: project_id,
1868 ..
1869 } => {
1870 let client = upstream_client.clone();
1871 let project_id = *project_id;
1872 cx.background_executor().spawn(async move {
1873 client
1874 .request(proto::GitInit {
1875 project_id: project_id,
1876 abs_path: path.to_string_lossy().into_owned(),
1877 fallback_branch_name,
1878 })
1879 .await?;
1880 Ok(())
1881 })
1882 }
1883 }
1884 }
1885
1886 pub fn git_clone(
1887 &self,
1888 repo: String,
1889 path: impl Into<Arc<std::path::Path>>,
1890 cx: &App,
1891 ) -> Task<Result<()>> {
1892 let path = path.into();
1893 match &self.state {
1894 GitStoreState::Local { fs, .. } => {
1895 let fs = fs.clone();
1896 cx.background_executor()
1897 .spawn(async move { fs.git_clone(&repo, &path).await })
1898 }
1899 GitStoreState::Remote {
1900 upstream_client,
1901 upstream_project_id,
1902 ..
1903 } => {
1904 if upstream_client.is_via_collab() {
1905 return Task::ready(Err(anyhow!(
1906 "Git Clone isn't supported for project guests"
1907 )));
1908 }
1909 let request = upstream_client.request(proto::GitClone {
1910 project_id: *upstream_project_id,
1911 abs_path: path.to_string_lossy().into_owned(),
1912 remote_repo: repo,
1913 });
1914
1915 cx.background_spawn(async move {
1916 let result = request.await?;
1917
1918 match result.success {
1919 true => Ok(()),
1920 false => Err(anyhow!("Git Clone failed")),
1921 }
1922 })
1923 }
1924 }
1925 }
1926
    /// Message handler: creates or updates a remote repository entry from an
    /// upstream `UpdateRepository` message, then re-forwards the update to
    /// any downstream clients of this (possibly intermediary) project.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // First update for an unknown id creates the remote repository
            // entity and subscribes to its events.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward the update downstream under our own project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1982
1983 async fn handle_remove_repository(
1984 this: Entity<Self>,
1985 envelope: TypedEnvelope<proto::RemoveRepository>,
1986 mut cx: AsyncApp,
1987 ) -> Result<()> {
1988 this.update(&mut cx, |this, cx| {
1989 let mut update = envelope.payload;
1990 let id = RepositoryId::from_proto(update.id);
1991 this.repositories.remove(&id);
1992 if let Some((client, project_id)) = this.downstream_client() {
1993 update.project_id = project_id.to_proto();
1994 client.send(update).log_err();
1995 }
1996 if this.active_repo_id == Some(id) {
1997 this.active_repo_id = None;
1998 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1999 }
2000 cx.emit(GitStoreEvent::RepositoryRemoved(id));
2001 });
2002 Ok(())
2003 }
2004
2005 async fn handle_git_init(
2006 this: Entity<Self>,
2007 envelope: TypedEnvelope<proto::GitInit>,
2008 cx: AsyncApp,
2009 ) -> Result<proto::Ack> {
2010 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2011 let name = envelope.payload.fallback_branch_name;
2012 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2013 .await?;
2014
2015 Ok(proto::Ack {})
2016 }
2017
2018 async fn handle_git_clone(
2019 this: Entity<Self>,
2020 envelope: TypedEnvelope<proto::GitClone>,
2021 cx: AsyncApp,
2022 ) -> Result<proto::GitCloneResponse> {
2023 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2024 let repo_name = envelope.payload.remote_repo;
2025 let result = cx
2026 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2027 .await;
2028
2029 Ok(proto::GitCloneResponse {
2030 success: result.is_ok(),
2031 })
2032 }
2033
2034 async fn handle_fetch(
2035 this: Entity<Self>,
2036 envelope: TypedEnvelope<proto::Fetch>,
2037 mut cx: AsyncApp,
2038 ) -> Result<proto::RemoteMessageResponse> {
2039 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2040 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2041 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2042 let askpass_id = envelope.payload.askpass_id;
2043
2044 let askpass = make_remote_delegate(
2045 this,
2046 envelope.payload.project_id,
2047 repository_id,
2048 askpass_id,
2049 &mut cx,
2050 );
2051
2052 let remote_output = repository_handle
2053 .update(&mut cx, |repository_handle, cx| {
2054 repository_handle.fetch(fetch_options, askpass, cx)
2055 })
2056 .await??;
2057
2058 Ok(proto::RemoteMessageResponse {
2059 stdout: remote_output.stdout,
2060 stderr: remote_output.stderr,
2061 })
2062 }
2063
    /// Message handler: pushes a branch to a remote on the requested
    /// repository, with an askpass delegate that proxies credential prompts
    /// back to the requesting peer.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `options()` decodes the enum (substituting a default when unset),
        // so gate on the raw field's presence via `.map(|_| ...)` to keep
        // "no options" distinct from an explicit value.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2111
2112 async fn handle_pull(
2113 this: Entity<Self>,
2114 envelope: TypedEnvelope<proto::Pull>,
2115 mut cx: AsyncApp,
2116 ) -> Result<proto::RemoteMessageResponse> {
2117 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2118 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2119 let askpass_id = envelope.payload.askpass_id;
2120 let askpass = make_remote_delegate(
2121 this,
2122 envelope.payload.project_id,
2123 repository_id,
2124 askpass_id,
2125 &mut cx,
2126 );
2127
2128 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2129 let remote_name = envelope.payload.remote_name.into();
2130 let rebase = envelope.payload.rebase;
2131
2132 let remote_message = repository_handle
2133 .update(&mut cx, |repository_handle, cx| {
2134 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2135 })
2136 .await??;
2137
2138 Ok(proto::RemoteMessageResponse {
2139 stdout: remote_message.stdout,
2140 stderr: remote_message.stderr,
2141 })
2142 }
2143
2144 async fn handle_stage(
2145 this: Entity<Self>,
2146 envelope: TypedEnvelope<proto::Stage>,
2147 mut cx: AsyncApp,
2148 ) -> Result<proto::Ack> {
2149 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2150 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2151
2152 let entries = envelope
2153 .payload
2154 .paths
2155 .into_iter()
2156 .map(|path| RepoPath::new(&path))
2157 .collect::<Result<Vec<_>>>()?;
2158
2159 repository_handle
2160 .update(&mut cx, |repository_handle, cx| {
2161 repository_handle.stage_entries(entries, cx)
2162 })
2163 .await?;
2164 Ok(proto::Ack {})
2165 }
2166
2167 async fn handle_unstage(
2168 this: Entity<Self>,
2169 envelope: TypedEnvelope<proto::Unstage>,
2170 mut cx: AsyncApp,
2171 ) -> Result<proto::Ack> {
2172 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2173 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2174
2175 let entries = envelope
2176 .payload
2177 .paths
2178 .into_iter()
2179 .map(|path| RepoPath::new(&path))
2180 .collect::<Result<Vec<_>>>()?;
2181
2182 repository_handle
2183 .update(&mut cx, |repository_handle, cx| {
2184 repository_handle.unstage_entries(entries, cx)
2185 })
2186 .await?;
2187
2188 Ok(proto::Ack {})
2189 }
2190
2191 async fn handle_stash(
2192 this: Entity<Self>,
2193 envelope: TypedEnvelope<proto::Stash>,
2194 mut cx: AsyncApp,
2195 ) -> Result<proto::Ack> {
2196 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2197 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2198
2199 let entries = envelope
2200 .payload
2201 .paths
2202 .into_iter()
2203 .map(|path| RepoPath::new(&path))
2204 .collect::<Result<Vec<_>>>()?;
2205
2206 repository_handle
2207 .update(&mut cx, |repository_handle, cx| {
2208 repository_handle.stash_entries(entries, cx)
2209 })
2210 .await?;
2211
2212 Ok(proto::Ack {})
2213 }
2214
2215 async fn handle_stash_pop(
2216 this: Entity<Self>,
2217 envelope: TypedEnvelope<proto::StashPop>,
2218 mut cx: AsyncApp,
2219 ) -> Result<proto::Ack> {
2220 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2221 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2222 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2223
2224 repository_handle
2225 .update(&mut cx, |repository_handle, cx| {
2226 repository_handle.stash_pop(stash_index, cx)
2227 })
2228 .await?;
2229
2230 Ok(proto::Ack {})
2231 }
2232
2233 async fn handle_stash_apply(
2234 this: Entity<Self>,
2235 envelope: TypedEnvelope<proto::StashApply>,
2236 mut cx: AsyncApp,
2237 ) -> Result<proto::Ack> {
2238 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2239 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2240 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2241
2242 repository_handle
2243 .update(&mut cx, |repository_handle, cx| {
2244 repository_handle.stash_apply(stash_index, cx)
2245 })
2246 .await?;
2247
2248 Ok(proto::Ack {})
2249 }
2250
2251 async fn handle_stash_drop(
2252 this: Entity<Self>,
2253 envelope: TypedEnvelope<proto::StashDrop>,
2254 mut cx: AsyncApp,
2255 ) -> Result<proto::Ack> {
2256 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2257 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2258 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2259
2260 repository_handle
2261 .update(&mut cx, |repository_handle, cx| {
2262 repository_handle.stash_drop(stash_index, cx)
2263 })
2264 .await??;
2265
2266 Ok(proto::Ack {})
2267 }
2268
2269 async fn handle_set_index_text(
2270 this: Entity<Self>,
2271 envelope: TypedEnvelope<proto::SetIndexText>,
2272 mut cx: AsyncApp,
2273 ) -> Result<proto::Ack> {
2274 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2275 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2276 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2277
2278 repository_handle
2279 .update(&mut cx, |repository_handle, cx| {
2280 repository_handle.spawn_set_index_text_job(
2281 repo_path,
2282 envelope.payload.text,
2283 None,
2284 cx,
2285 )
2286 })
2287 .await??;
2288 Ok(proto::Ack {})
2289 }
2290
2291 async fn handle_run_hook(
2292 this: Entity<Self>,
2293 envelope: TypedEnvelope<proto::RunGitHook>,
2294 mut cx: AsyncApp,
2295 ) -> Result<proto::Ack> {
2296 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2297 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2298 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2299 repository_handle
2300 .update(&mut cx, |repository_handle, cx| {
2301 repository_handle.run_hook(hook, cx)
2302 })
2303 .await??;
2304 Ok(proto::Ack {})
2305 }
2306
2307 async fn handle_commit(
2308 this: Entity<Self>,
2309 envelope: TypedEnvelope<proto::Commit>,
2310 mut cx: AsyncApp,
2311 ) -> Result<proto::Ack> {
2312 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2313 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2314 let askpass_id = envelope.payload.askpass_id;
2315
2316 let askpass = make_remote_delegate(
2317 this,
2318 envelope.payload.project_id,
2319 repository_id,
2320 askpass_id,
2321 &mut cx,
2322 );
2323
2324 let message = SharedString::from(envelope.payload.message);
2325 let name = envelope.payload.name.map(SharedString::from);
2326 let email = envelope.payload.email.map(SharedString::from);
2327 let options = envelope.payload.options.unwrap_or_default();
2328
2329 repository_handle
2330 .update(&mut cx, |repository_handle, cx| {
2331 repository_handle.commit(
2332 message,
2333 name.zip(email),
2334 CommitOptions {
2335 amend: options.amend,
2336 signoff: options.signoff,
2337 },
2338 askpass,
2339 cx,
2340 )
2341 })
2342 .await??;
2343 Ok(proto::Ack {})
2344 }
2345
2346 async fn handle_get_remotes(
2347 this: Entity<Self>,
2348 envelope: TypedEnvelope<proto::GetRemotes>,
2349 mut cx: AsyncApp,
2350 ) -> Result<proto::GetRemotesResponse> {
2351 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2352 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2353
2354 let branch_name = envelope.payload.branch_name;
2355 let is_push = envelope.payload.is_push;
2356
2357 let remotes = repository_handle
2358 .update(&mut cx, |repository_handle, _| {
2359 repository_handle.get_remotes(branch_name, is_push)
2360 })
2361 .await??;
2362
2363 Ok(proto::GetRemotesResponse {
2364 remotes: remotes
2365 .into_iter()
2366 .map(|remotes| proto::get_remotes_response::Remote {
2367 name: remotes.name.to_string(),
2368 })
2369 .collect::<Vec<_>>(),
2370 })
2371 }
2372
2373 async fn handle_get_worktrees(
2374 this: Entity<Self>,
2375 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2376 mut cx: AsyncApp,
2377 ) -> Result<proto::GitWorktreesResponse> {
2378 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2379 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2380
2381 let worktrees = repository_handle
2382 .update(&mut cx, |repository_handle, _| {
2383 repository_handle.worktrees()
2384 })
2385 .await??;
2386
2387 Ok(proto::GitWorktreesResponse {
2388 worktrees: worktrees
2389 .into_iter()
2390 .map(|worktree| worktree_to_proto(&worktree))
2391 .collect::<Vec<_>>(),
2392 })
2393 }
2394
2395 async fn handle_create_worktree(
2396 this: Entity<Self>,
2397 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2398 mut cx: AsyncApp,
2399 ) -> Result<proto::Ack> {
2400 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2401 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2402 let directory = PathBuf::from(envelope.payload.directory);
2403 let name = envelope.payload.name;
2404 let commit = envelope.payload.commit;
2405
2406 repository_handle
2407 .update(&mut cx, |repository_handle, _| {
2408 repository_handle.create_worktree(name, directory, commit)
2409 })
2410 .await??;
2411
2412 Ok(proto::Ack {})
2413 }
2414
2415 async fn handle_remove_worktree(
2416 this: Entity<Self>,
2417 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2418 mut cx: AsyncApp,
2419 ) -> Result<proto::Ack> {
2420 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2421 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2422 let path = PathBuf::from(envelope.payload.path);
2423 let force = envelope.payload.force;
2424
2425 repository_handle
2426 .update(&mut cx, |repository_handle, _| {
2427 repository_handle.remove_worktree(path, force)
2428 })
2429 .await??;
2430
2431 Ok(proto::Ack {})
2432 }
2433
2434 async fn handle_rename_worktree(
2435 this: Entity<Self>,
2436 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2437 mut cx: AsyncApp,
2438 ) -> Result<proto::Ack> {
2439 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2440 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2441 let old_path = PathBuf::from(envelope.payload.old_path);
2442 let new_path = PathBuf::from(envelope.payload.new_path);
2443
2444 repository_handle
2445 .update(&mut cx, |repository_handle, _| {
2446 repository_handle.rename_worktree(old_path, new_path)
2447 })
2448 .await??;
2449
2450 Ok(proto::Ack {})
2451 }
2452
2453 async fn handle_get_branches(
2454 this: Entity<Self>,
2455 envelope: TypedEnvelope<proto::GitGetBranches>,
2456 mut cx: AsyncApp,
2457 ) -> Result<proto::GitBranchesResponse> {
2458 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2459 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2460
2461 let branches = repository_handle
2462 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2463 .await??;
2464
2465 Ok(proto::GitBranchesResponse {
2466 branches: branches
2467 .into_iter()
2468 .map(|branch| branch_to_proto(&branch))
2469 .collect::<Vec<_>>(),
2470 })
2471 }
2472 async fn handle_get_default_branch(
2473 this: Entity<Self>,
2474 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2475 mut cx: AsyncApp,
2476 ) -> Result<proto::GetDefaultBranchResponse> {
2477 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2478 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2479
2480 let branch = repository_handle
2481 .update(&mut cx, |repository_handle, _| {
2482 repository_handle.default_branch(false)
2483 })
2484 .await??
2485 .map(Into::into);
2486
2487 Ok(proto::GetDefaultBranchResponse { branch })
2488 }
2489 async fn handle_create_branch(
2490 this: Entity<Self>,
2491 envelope: TypedEnvelope<proto::GitCreateBranch>,
2492 mut cx: AsyncApp,
2493 ) -> Result<proto::Ack> {
2494 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2495 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2496 let branch_name = envelope.payload.branch_name;
2497
2498 repository_handle
2499 .update(&mut cx, |repository_handle, _| {
2500 repository_handle.create_branch(branch_name, None)
2501 })
2502 .await??;
2503
2504 Ok(proto::Ack {})
2505 }
2506
2507 async fn handle_change_branch(
2508 this: Entity<Self>,
2509 envelope: TypedEnvelope<proto::GitChangeBranch>,
2510 mut cx: AsyncApp,
2511 ) -> Result<proto::Ack> {
2512 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2513 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2514 let branch_name = envelope.payload.branch_name;
2515
2516 repository_handle
2517 .update(&mut cx, |repository_handle, _| {
2518 repository_handle.change_branch(branch_name)
2519 })
2520 .await??;
2521
2522 Ok(proto::Ack {})
2523 }
2524
2525 async fn handle_rename_branch(
2526 this: Entity<Self>,
2527 envelope: TypedEnvelope<proto::GitRenameBranch>,
2528 mut cx: AsyncApp,
2529 ) -> Result<proto::Ack> {
2530 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2531 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2532 let branch = envelope.payload.branch;
2533 let new_name = envelope.payload.new_name;
2534
2535 repository_handle
2536 .update(&mut cx, |repository_handle, _| {
2537 repository_handle.rename_branch(branch, new_name)
2538 })
2539 .await??;
2540
2541 Ok(proto::Ack {})
2542 }
2543
2544 async fn handle_create_remote(
2545 this: Entity<Self>,
2546 envelope: TypedEnvelope<proto::GitCreateRemote>,
2547 mut cx: AsyncApp,
2548 ) -> Result<proto::Ack> {
2549 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2550 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2551 let remote_name = envelope.payload.remote_name;
2552 let remote_url = envelope.payload.remote_url;
2553
2554 repository_handle
2555 .update(&mut cx, |repository_handle, _| {
2556 repository_handle.create_remote(remote_name, remote_url)
2557 })
2558 .await??;
2559
2560 Ok(proto::Ack {})
2561 }
2562
2563 async fn handle_delete_branch(
2564 this: Entity<Self>,
2565 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2566 mut cx: AsyncApp,
2567 ) -> Result<proto::Ack> {
2568 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2569 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2570 let is_remote = envelope.payload.is_remote;
2571 let branch_name = envelope.payload.branch_name;
2572
2573 repository_handle
2574 .update(&mut cx, |repository_handle, _| {
2575 repository_handle.delete_branch(is_remote, branch_name)
2576 })
2577 .await??;
2578
2579 Ok(proto::Ack {})
2580 }
2581
2582 async fn handle_remove_remote(
2583 this: Entity<Self>,
2584 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2585 mut cx: AsyncApp,
2586 ) -> Result<proto::Ack> {
2587 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2588 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2589 let remote_name = envelope.payload.remote_name;
2590
2591 repository_handle
2592 .update(&mut cx, |repository_handle, _| {
2593 repository_handle.remove_remote(remote_name)
2594 })
2595 .await??;
2596
2597 Ok(proto::Ack {})
2598 }
2599
2600 async fn handle_show(
2601 this: Entity<Self>,
2602 envelope: TypedEnvelope<proto::GitShow>,
2603 mut cx: AsyncApp,
2604 ) -> Result<proto::GitCommitDetails> {
2605 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2606 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2607
2608 let commit = repository_handle
2609 .update(&mut cx, |repository_handle, _| {
2610 repository_handle.show(envelope.payload.commit)
2611 })
2612 .await??;
2613 Ok(proto::GitCommitDetails {
2614 sha: commit.sha.into(),
2615 message: commit.message.into(),
2616 commit_timestamp: commit.commit_timestamp,
2617 author_email: commit.author_email.into(),
2618 author_name: commit.author_name.into(),
2619 })
2620 }
2621
2622 async fn handle_load_commit_diff(
2623 this: Entity<Self>,
2624 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2625 mut cx: AsyncApp,
2626 ) -> Result<proto::LoadCommitDiffResponse> {
2627 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2628 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2629
2630 let commit_diff = repository_handle
2631 .update(&mut cx, |repository_handle, _| {
2632 repository_handle.load_commit_diff(envelope.payload.commit)
2633 })
2634 .await??;
2635 Ok(proto::LoadCommitDiffResponse {
2636 files: commit_diff
2637 .files
2638 .into_iter()
2639 .map(|file| proto::CommitFile {
2640 path: file.path.to_proto(),
2641 old_text: file.old_text,
2642 new_text: file.new_text,
2643 is_binary: file.is_binary,
2644 })
2645 .collect(),
2646 })
2647 }
2648
2649 async fn handle_file_history(
2650 this: Entity<Self>,
2651 envelope: TypedEnvelope<proto::GitFileHistory>,
2652 mut cx: AsyncApp,
2653 ) -> Result<proto::GitFileHistoryResponse> {
2654 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2655 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2656 let path = RepoPath::from_proto(&envelope.payload.path)?;
2657 let skip = envelope.payload.skip as usize;
2658 let limit = envelope.payload.limit.map(|l| l as usize);
2659
2660 let file_history = repository_handle
2661 .update(&mut cx, |repository_handle, _| {
2662 repository_handle.file_history_paginated(path, skip, limit)
2663 })
2664 .await??;
2665
2666 Ok(proto::GitFileHistoryResponse {
2667 entries: file_history
2668 .entries
2669 .into_iter()
2670 .map(|entry| proto::FileHistoryEntry {
2671 sha: entry.sha.to_string(),
2672 subject: entry.subject.to_string(),
2673 message: entry.message.to_string(),
2674 commit_timestamp: entry.commit_timestamp,
2675 author_name: entry.author_name.to_string(),
2676 author_email: entry.author_email.to_string(),
2677 })
2678 .collect(),
2679 path: file_history.path.to_proto(),
2680 })
2681 }
2682
2683 async fn handle_reset(
2684 this: Entity<Self>,
2685 envelope: TypedEnvelope<proto::GitReset>,
2686 mut cx: AsyncApp,
2687 ) -> Result<proto::Ack> {
2688 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2689 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2690
2691 let mode = match envelope.payload.mode() {
2692 git_reset::ResetMode::Soft => ResetMode::Soft,
2693 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2694 };
2695
2696 repository_handle
2697 .update(&mut cx, |repository_handle, cx| {
2698 repository_handle.reset(envelope.payload.commit, mode, cx)
2699 })
2700 .await??;
2701 Ok(proto::Ack {})
2702 }
2703
2704 async fn handle_checkout_files(
2705 this: Entity<Self>,
2706 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2707 mut cx: AsyncApp,
2708 ) -> Result<proto::Ack> {
2709 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2710 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2711 let paths = envelope
2712 .payload
2713 .paths
2714 .iter()
2715 .map(|s| RepoPath::from_proto(s))
2716 .collect::<Result<Vec<_>>>()?;
2717
2718 repository_handle
2719 .update(&mut cx, |repository_handle, cx| {
2720 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2721 })
2722 .await?;
2723 Ok(proto::Ack {})
2724 }
2725
    /// Opens the commit-message buffer for the requested repository and
    /// replicates it to the requesting peer, returning the buffer's remote id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Replicate the buffer to the peer that asked for it; when the request
        // was forwarded, prefer the original sender over the forwarding host.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2756
    /// Answers an askpass credential prompt for an in-flight remote git
    /// operation on the requested repository.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the shared map so we can await the prompt
        // without holding the lock across the await point.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so follow-up prompts for the same operation
        // can still be answered.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2785
2786 async fn handle_check_for_pushed_commits(
2787 this: Entity<Self>,
2788 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2789 mut cx: AsyncApp,
2790 ) -> Result<proto::CheckForPushedCommitsResponse> {
2791 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2792 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2793
2794 let branches = repository_handle
2795 .update(&mut cx, |repository_handle, _| {
2796 repository_handle.check_for_pushed_commits()
2797 })
2798 .await??;
2799 Ok(proto::CheckForPushedCommitsResponse {
2800 pushed_to: branches
2801 .into_iter()
2802 .map(|commit| commit.to_string())
2803 .collect(),
2804 })
2805 }
2806
2807 async fn handle_git_diff(
2808 this: Entity<Self>,
2809 envelope: TypedEnvelope<proto::GitDiff>,
2810 mut cx: AsyncApp,
2811 ) -> Result<proto::GitDiffResponse> {
2812 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2813 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2814 let diff_type = match envelope.payload.diff_type() {
2815 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2816 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2817 proto::git_diff::DiffType::MergeBase => {
2818 let base_ref = envelope
2819 .payload
2820 .merge_base_ref
2821 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2822 DiffType::MergeBase {
2823 base_ref: base_ref.into(),
2824 }
2825 }
2826 };
2827
2828 let mut diff = repository_handle
2829 .update(&mut cx, |repository_handle, cx| {
2830 repository_handle.diff(diff_type, cx)
2831 })
2832 .await??;
2833 const ONE_MB: usize = 1_000_000;
2834 if diff.len() > ONE_MB {
2835 diff = diff.chars().take(ONE_MB).collect()
2836 }
2837
2838 Ok(proto::GitDiffResponse { diff })
2839 }
2840
2841 async fn handle_tree_diff(
2842 this: Entity<Self>,
2843 request: TypedEnvelope<proto::GetTreeDiff>,
2844 mut cx: AsyncApp,
2845 ) -> Result<proto::GetTreeDiffResponse> {
2846 let repository_id = RepositoryId(request.payload.repository_id);
2847 let diff_type = if request.payload.is_merge {
2848 DiffTreeType::MergeBase {
2849 base: request.payload.base.into(),
2850 head: request.payload.head.into(),
2851 }
2852 } else {
2853 DiffTreeType::Since {
2854 base: request.payload.base.into(),
2855 head: request.payload.head.into(),
2856 }
2857 };
2858
2859 let diff = this
2860 .update(&mut cx, |this, cx| {
2861 let repository = this.repositories().get(&repository_id)?;
2862 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2863 })
2864 .context("missing repository")?
2865 .await??;
2866
2867 Ok(proto::GetTreeDiffResponse {
2868 entries: diff
2869 .entries
2870 .into_iter()
2871 .map(|(path, status)| proto::TreeDiffStatus {
2872 path: path.as_ref().to_proto(),
2873 status: match status {
2874 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2875 TreeDiffStatus::Modified { .. } => {
2876 proto::tree_diff_status::Status::Modified.into()
2877 }
2878 TreeDiffStatus::Deleted { .. } => {
2879 proto::tree_diff_status::Status::Deleted.into()
2880 }
2881 },
2882 oid: match status {
2883 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2884 Some(old.to_string())
2885 }
2886 TreeDiffStatus::Added => None,
2887 },
2888 })
2889 .collect(),
2890 })
2891 }
2892
2893 async fn handle_get_blob_content(
2894 this: Entity<Self>,
2895 request: TypedEnvelope<proto::GetBlobContent>,
2896 mut cx: AsyncApp,
2897 ) -> Result<proto::GetBlobContentResponse> {
2898 let oid = git::Oid::from_str(&request.payload.oid)?;
2899 let repository_id = RepositoryId(request.payload.repository_id);
2900 let content = this
2901 .update(&mut cx, |this, cx| {
2902 let repository = this.repositories().get(&repository_id)?;
2903 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2904 })
2905 .context("missing repository")?
2906 .await?;
2907 Ok(proto::GetBlobContentResponse { content })
2908 }
2909
2910 async fn handle_open_unstaged_diff(
2911 this: Entity<Self>,
2912 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2913 mut cx: AsyncApp,
2914 ) -> Result<proto::OpenUnstagedDiffResponse> {
2915 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2916 let diff = this
2917 .update(&mut cx, |this, cx| {
2918 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2919 Some(this.open_unstaged_diff(buffer, cx))
2920 })
2921 .context("missing buffer")?
2922 .await?;
2923 this.update(&mut cx, |this, _| {
2924 let shared_diffs = this
2925 .shared_diffs
2926 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2927 .or_default();
2928 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2929 });
2930 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
2931 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2932 }
2933
    /// Opens the uncommitted diff for a buffer, records it as shared with the
    /// requesting peer, and returns the committed/staged base texts together
    /// with a mode describing how the index relates to HEAD.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff as shared with this peer so later base-text updates
        // are forwarded to it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff (if any) holds the index (staged) snapshot.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    // Matching remote ids mean the index snapshot *is* the HEAD
                    // snapshot, so the staged text need not be re-sent.
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // No index snapshot: the file has no staged base text.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base text (e.g. file not in HEAD); send only
                // whatever staged text exists.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2994
2995 async fn handle_update_diff_bases(
2996 this: Entity<Self>,
2997 request: TypedEnvelope<proto::UpdateDiffBases>,
2998 mut cx: AsyncApp,
2999 ) -> Result<()> {
3000 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3001 this.update(&mut cx, |this, cx| {
3002 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
3003 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
3004 {
3005 let buffer = buffer.read(cx).text_snapshot();
3006 diff_state.update(cx, |diff_state, cx| {
3007 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
3008 })
3009 }
3010 });
3011 Ok(())
3012 }
3013
3014 async fn handle_blame_buffer(
3015 this: Entity<Self>,
3016 envelope: TypedEnvelope<proto::BlameBuffer>,
3017 mut cx: AsyncApp,
3018 ) -> Result<proto::BlameBufferResponse> {
3019 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3020 let version = deserialize_version(&envelope.payload.version);
3021 let buffer = this.read_with(&cx, |this, cx| {
3022 this.buffer_store.read(cx).get_existing(buffer_id)
3023 })?;
3024 buffer
3025 .update(&mut cx, |buffer, _| {
3026 buffer.wait_for_version(version.clone())
3027 })
3028 .await?;
3029 let blame = this
3030 .update(&mut cx, |this, cx| {
3031 this.blame_buffer(&buffer, Some(version), cx)
3032 })
3033 .await?;
3034 Ok(serialize_blame_buffer_response(blame))
3035 }
3036
3037 async fn handle_get_permalink_to_line(
3038 this: Entity<Self>,
3039 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3040 mut cx: AsyncApp,
3041 ) -> Result<proto::GetPermalinkToLineResponse> {
3042 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3043 // let version = deserialize_version(&envelope.payload.version);
3044 let selection = {
3045 let proto_selection = envelope
3046 .payload
3047 .selection
3048 .context("no selection to get permalink for defined")?;
3049 proto_selection.start as u32..proto_selection.end as u32
3050 };
3051 let buffer = this.read_with(&cx, |this, cx| {
3052 this.buffer_store.read(cx).get_existing(buffer_id)
3053 })?;
3054 let permalink = this
3055 .update(&mut cx, |this, cx| {
3056 this.get_permalink_to_line(&buffer, selection, cx)
3057 })
3058 .await?;
3059 Ok(proto::GetPermalinkToLineResponse {
3060 permalink: permalink.to_string(),
3061 })
3062 }
3063
3064 fn repository_for_request(
3065 this: &Entity<Self>,
3066 id: RepositoryId,
3067 cx: &mut AsyncApp,
3068 ) -> Result<Entity<Repository>> {
3069 this.read_with(cx, |this, _| {
3070 this.repositories
3071 .get(&id)
3072 .context("missing repository handle")
3073 .cloned()
3074 })
3075 }
3076
3077 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3078 self.repositories
3079 .iter()
3080 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3081 .collect()
3082 }
3083
    /// Groups a batch of updated worktree entries by the repository containing
    /// them, yielding repo-relative paths for each repository.
    ///
    /// When repositories are nested, each path is attributed only to its
    /// innermost containing repository. Sorting and prefix matching run on the
    /// background executor.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize up front so the background tasks can compare against
        // repository work directories without touching the worktree entity.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sorted by work directory; iterated in reverse below so that
            // nested (more deeply-pathed) repositories are handled first.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Tasks arrive in reverse repo order, so more-specific (nested) repos
            // claim their paths first. We always want to assign a path to its
            // innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3163}
3164
impl BufferGitState {
    /// Creates an empty per-buffer git state: no base texts, no live diffs,
    /// and no recalculation in flight.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }

    /// Records the buffer's new language and kicks off a diff recalculation so
    /// syntax-aware diff rendering stays in sync.
    #[ztracing::instrument(skip_all)]
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Re-parses git conflict markers in the given buffer snapshot and updates
    /// the associated `ConflictSet` on a background task.
    ///
    /// Returns a receiver that resolves once the conflict set has been updated.
    /// If there is no live conflict set, or it has no conflicts, the receiver
    /// is returned unfulfilled (it will report `Canceled` when dropped).
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only bother re-parsing if the buffer previously had conflicts.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff against the old snapshot off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Notify every caller that was waiting on this re-parse.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }

    /// Upgrades the weak handle to the unstaged (working copy vs. index) diff,
    /// if it is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Upgrades the weak handle to the uncommitted (working copy vs. HEAD)
    /// diff, if it is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Upgrades the weak handle to the diff against an arbitrary commit oid,
    /// if it is still alive.
    fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
        self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
    }

    /// Applies an `UpdateDiffBases` message from the host, translating the
    /// proto mode into a `DiffBasesChange` and recalculating diffs.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        // Silently ignore messages with an unrecognized mode (e.g. from a
        // newer peer).
        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }

    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves when it finishes; otherwise returns `None`.
    pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
        if *self.recalculating_tx.borrow() {
            let mut rx = self.recalculating_tx.subscribe();
            Some(async move {
                loop {
                    // The watch yields the current value; stop once it flips
                    // to `false` (or the channel closes).
                    let is_recalculating = rx.recv().await;
                    if is_recalculating != Some(true) {
                        break;
                    }
                }
            })
        } else {
            None
        }
    }

    /// Stores new index/HEAD base texts (normalizing line endings) and marks
    /// the corresponding change flags, then recalculates all diffs.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: Option<DiffBasesChange>,
        cx: &mut Context<Self>,
    ) {
        match diff_bases_change {
            Some(DiffBasesChange::SetIndex(index)) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetHead(head)) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            Some(DiffBasesChange::SetBoth(text)) => {
                // Share one allocation between index and head; `Arc::ptr_eq`
                // is later used to detect that they match.
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::from(text.as_str())
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetEach { index, head }) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            None => {}
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged, uncommitted, and per-oid diffs for this buffer
    /// on a spawned task, then commits the new snapshots back to the diff
    /// entities.
    ///
    /// The recalculation is abandoned if new hunk stage/unstage operations
    /// arrive while it is running; a later recalculation will pick up the
    /// settled state. `recalculating_tx` is `true` for the duration so callers
    /// can await quiescence via `wait_for_recalculation`.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality works here because `SetBoth` stores one shared Arc.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        // Snapshot the still-live oid diffs along with their base texts.
        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop dead oid diffs and their cached base texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When index == HEAD the unstaged diff can be reused directly.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Recompute each diff against an arbitrary commit oid in turn,
            // yielding between iterations.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the change flags and signal that recalculation is done.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
}
3544
/// Builds an askpass delegate that forwards credential prompts from a local
/// git operation to the downstream client over RPC.
///
/// The remote peer answers the prompt; the answer is converted into an
/// `EncryptedPassword` and the plaintext response buffer is zeroized once it
/// has been handed off.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // With no downstream client the prompt goes unanswered and the
            // caller's receiver is simply dropped.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext secret from memory as soon as possible.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3574
3575impl RepositoryId {
3576 pub fn to_proto(self) -> u64 {
3577 self.0
3578 }
3579
3580 pub fn from_proto(id: u64) -> Self {
3581 RepositoryId(id)
3582 }
3583}
3584
impl RepositorySnapshot {
    /// Creates a snapshot with no statuses, branch, or merge state.
    ///
    /// When `original_repo_abs_path` is `None`, the work directory is assumed
    /// to be the main checkout (i.e. not a linked worktree).
    fn empty(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
    ) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            original_repo_abs_path: original_repo_abs_path
                .unwrap_or_else(|| work_directory_abs_path.clone()),
            work_directory_abs_path,
            branch: None,
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
            linked_worktrees: Arc::from([]),
            path_style,
        }
    }

    /// Serializes the entire snapshot into an `UpdateRepository` message, as
    /// sent when a downstream client first learns of this repository.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// Serializes the difference between `old` and `self` into an incremental
    /// `UpdateRepository` message, carrying only changed/removed statuses.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        // Both status collections are ordered by path, so a two-pointer merge
        // finds added, changed, and removed entries in a single pass.
        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Present only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Present in both: emit only if something changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Present only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    pub fn is_main_worktree(&self) -> bool {
        self.work_directory_abs_path == self.original_repo_abs_path
    }

    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }

    /// All git worktrees linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }

    /// Iterates over all file status entries, ordered by repo path.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// Aggregated git status summary across all paths in the repository.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// Looks up the status entry for a single repo-relative path.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// Looks up the diff stat (added/removed line counts) for a single
    /// repo-relative path, if one has been computed.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }

    /// Converts an absolute path into a path relative to this repository's
    /// work directory, or `None` if it lies outside the repository.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Converts a repo-relative path back into an absolute path under the
    /// work directory.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }

    /// Shared implementation of `abs_path_to_repo_path` that doesn't require a
    /// snapshot, so background tasks can call it with captured fields.
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }

    /// Whether the given path was conflicted the last time the merge heads
    /// changed, even if the conflict has since been resolved in the index.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }

    /// Whether the given path is conflicted now, or was conflicted the last
    /// time the merge heads changed.
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change = self
            .merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
3824
3825pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3826 proto::StashEntry {
3827 oid: entry.oid.as_bytes().to_vec(),
3828 message: entry.message.clone(),
3829 branch: entry.branch.clone(),
3830 index: entry.index as u64,
3831 timestamp: entry.timestamp,
3832 }
3833}
3834
3835pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3836 Ok(StashEntry {
3837 oid: Oid::from_bytes(&entry.oid)?,
3838 message: entry.message.clone(),
3839 index: entry.index as usize,
3840 branch: entry.branch.clone(),
3841 timestamp: entry.timestamp,
3842 })
3843}
3844
3845impl MergeDetails {
3846 async fn update(
3847 &mut self,
3848 backend: &Arc<dyn GitRepository>,
3849 current_conflicted_paths: Vec<RepoPath>,
3850 ) -> Result<bool> {
3851 log::debug!("load merge details");
3852 self.message = backend.merge_message().await.map(SharedString::from);
3853 let heads = backend
3854 .revparse_batch(vec![
3855 "MERGE_HEAD".into(),
3856 "CHERRY_PICK_HEAD".into(),
3857 "REBASE_HEAD".into(),
3858 "REVERT_HEAD".into(),
3859 "APPLY_HEAD".into(),
3860 ])
3861 .await
3862 .log_err()
3863 .unwrap_or_default()
3864 .into_iter()
3865 .map(|opt| opt.map(SharedString::from))
3866 .collect::<Vec<_>>();
3867
3868 let mut conflicts_changed = false;
3869
3870 // Record the merge state for newly conflicted paths
3871 for path in ¤t_conflicted_paths {
3872 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3873 conflicts_changed = true;
3874 self.merge_heads_by_conflicted_path
3875 .insert(path.clone(), heads.clone());
3876 }
3877 }
3878
3879 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3880 self.merge_heads_by_conflicted_path
3881 .retain(|path, old_merge_heads| {
3882 let keep = current_conflicted_paths.contains(path)
3883 || (old_merge_heads == &heads
3884 && old_merge_heads.iter().any(|head| head.is_some()));
3885 if !keep {
3886 conflicts_changed = true;
3887 }
3888 keep
3889 });
3890
3891 Ok(conflicts_changed)
3892 }
3893}
3894
3895impl Repository {
3896 pub fn is_trusted(&self) -> bool {
3897 match self.repository_state.peek() {
3898 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3899 _ => false,
3900 }
3901 }
3902
    /// Returns an owned copy of the current repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
3906
    /// Iterates over all pending stage/unstage operations, ordered by path.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
3910
    /// Aggregated summary of all pending operations in this repository.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
3914
    /// Looks up the pending operations recorded for a single repo-relative path.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
3920
    /// Creates a repository backed by a local git checkout.
    ///
    /// The backend state is initialized asynchronously on a shared task so
    /// that queued git jobs can await it; jobs submitted before initialization
    /// completes simply wait. Failures are captured as strings so the shared
    /// future stays cloneable.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Stringify the error so the shared future's output is `Clone`.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        // Wrap the local state in the `RepositoryState` enum for consumers
        // that are agnostic to local vs. remote.
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Cached commit-graph data is tied to the current branch; drop it when
        // the branch changes (after the initial scan).
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3988
    /// Creates a repository whose git operations are proxied over RPC to a
    /// remote host identified by `project_id`.
    ///
    /// Unlike `local`, the repository state is available immediately (there is
    /// no backend to initialize), so it is wrapped in a ready, shared task.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4026
    /// Upgrades the weak handle to the owning `GitStore`, if it is still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4030
    /// Reloads the index and HEAD base texts for every open buffer that
    /// belongs to this repository, then pushes any changes to the per-buffer
    /// diff state (and mirrors them to the downstream client, if any).
    ///
    /// Runs as a keyed job so repeated requests coalesce. Only meaningful for
    /// local repositories; for remote ones it logs an error and bails.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Collect, per open buffer in this repository: the buffer, its
                // repo path, and the current base texts (only for the diff
                // kinds that are actually in use).
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                // Skip buffers that belong to other repositories.
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Off the main thread: reload each buffer's base texts from
                // git and figure out the minimal `DiffBasesChange` to apply.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // Compare new base texts against what the diff
                            // state currently holds; `None` means no change.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Back on the main thread: apply each change to the diff
                // state, forwarding it downstream first when shared.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4190
4191 pub fn send_job<F, Fut, R>(
4192 &mut self,
4193 status: Option<SharedString>,
4194 job: F,
4195 ) -> oneshot::Receiver<R>
4196 where
4197 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
4198 Fut: Future<Output = R> + 'static,
4199 R: Send + 'static,
4200 {
4201 self.send_keyed_job(None, status, job)
4202 }
4203
    /// Enqueues `job` on the repository's serial job channel.
    ///
    /// `key`, when present, lets the queue consumer identify related jobs
    /// (e.g. index writes for the same paths). `status`, when present, is
    /// recorded in `active_jobs` for the duration of the job so the UI can
    /// display progress. Returns a receiver for the job's result; dropping
    /// the receiver does not cancel the job.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Unique id used to track this job in `active_jobs`.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    // Build the future first so the caller-provided closure
                    // runs synchronously on the queue's turn.
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job (with start time and status text)
                        // before awaiting it, so observers see it as active.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Always clear the entry; removing an absent key
                        // (no status was set) is a harmless no-op.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may already be dropped; ignore that.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4253
4254 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4255 let Some(git_store) = self.git_store.upgrade() else {
4256 return;
4257 };
4258 let entity = cx.entity();
4259 git_store.update(cx, |git_store, cx| {
4260 let Some((&id, _)) = git_store
4261 .repositories
4262 .iter()
4263 .find(|(_, handle)| *handle == &entity)
4264 else {
4265 return;
4266 };
4267 git_store.active_repo_id = Some(id);
4268 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4269 });
4270 }
4271
4272 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4273 self.snapshot.status()
4274 }
4275
4276 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4277 self.snapshot.diff_stat_for_path(path)
4278 }
4279
4280 pub fn cached_stash(&self) -> GitStash {
4281 self.snapshot.stash_entries.clone()
4282 }
4283
4284 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4285 let git_store = self.git_store.upgrade()?;
4286 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4287 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4288 let abs_path = SanitizedPath::new(&abs_path);
4289 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4290 Some(ProjectPath {
4291 worktree_id: worktree.read(cx).id(),
4292 path: relative_path,
4293 })
4294 }
4295
4296 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4297 let git_store = self.git_store.upgrade()?;
4298 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4299 let abs_path = worktree_store.absolutize(path, cx)?;
4300 self.snapshot.abs_path_to_repo_path(&abs_path)
4301 }
4302
4303 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4304 other
4305 .read(cx)
4306 .snapshot
4307 .work_directory_abs_path
4308 .starts_with(&self.snapshot.work_directory_abs_path)
4309 }
4310
    /// Returns the buffer used to compose this repository's commit message,
    /// creating it on first use and caching it in `commit_message_buffer`.
    ///
    /// For a local repository a fresh in-memory buffer is created; for a
    /// remote (collab) repository the host is asked to open one and we wait
    /// for it to sync over.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the buffer created by a previous call.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait until the host's buffer has replicated locally.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache for subsequent calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4363
4364 fn open_local_commit_buffer(
4365 language_registry: Option<Arc<LanguageRegistry>>,
4366 buffer_store: Entity<BufferStore>,
4367 cx: &mut Context<Self>,
4368 ) -> Task<Result<Entity<Buffer>>> {
4369 cx.spawn(async move |repository, cx| {
4370 let git_commit_language = match language_registry {
4371 Some(language_registry) => {
4372 Some(language_registry.language_for_name("Git Commit").await?)
4373 }
4374 None => None,
4375 };
4376 let buffer = buffer_store
4377 .update(cx, |buffer_store, cx| {
4378 buffer_store.create_buffer(git_commit_language, false, cx)
4379 })
4380 .await?;
4381
4382 repository.update(cx, |repository, _| {
4383 repository.commit_message_buffer = Some(buffer.clone());
4384 })?;
4385 Ok(buffer)
4386 })
4387 }
4388
    /// Restores `paths` to their contents at `commit`
    /// (`git checkout <commit> -- <paths>`), tracking the operation so the
    /// affected entries can be shown as reverted while it runs.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        // Status message shown in the UI while the job runs.
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    // Forward the operation to the collab host.
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4443
4444 pub fn reset(
4445 &mut self,
4446 commit: String,
4447 reset_mode: ResetMode,
4448 _cx: &mut App,
4449 ) -> oneshot::Receiver<Result<()>> {
4450 let id = self.id;
4451
4452 self.send_job(None, move |git_repo, _| async move {
4453 match git_repo {
4454 RepositoryState::Local(LocalRepositoryState {
4455 backend,
4456 environment,
4457 ..
4458 }) => backend.reset(commit, reset_mode, environment).await,
4459 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4460 client
4461 .request(proto::GitReset {
4462 project_id: project_id.0,
4463 repository_id: id.to_proto(),
4464 commit,
4465 mode: match reset_mode {
4466 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4467 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4468 },
4469 })
4470 .await?;
4471
4472 Ok(())
4473 }
4474 }
4475 })
4476 }
4477
    /// Loads the details (sha, message, author, timestamp) of `commit`.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    // Reassemble the protobuf response into `CommitDetails`.
                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4505
4506 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4507 let id = self.id;
4508 self.send_job(None, move |git_repo, cx| async move {
4509 match git_repo {
4510 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4511 backend.load_commit(commit, cx).await
4512 }
4513 RepositoryState::Remote(RemoteRepositoryState {
4514 client, project_id, ..
4515 }) => {
4516 let response = client
4517 .request(proto::LoadCommitDiff {
4518 project_id: project_id.0,
4519 repository_id: id.to_proto(),
4520 commit,
4521 })
4522 .await?;
4523 Ok(CommitDiff {
4524 files: response
4525 .files
4526 .into_iter()
4527 .map(|file| {
4528 Ok(CommitFile {
4529 path: RepoPath::from_proto(&file.path)?,
4530 old_text: file.old_text,
4531 new_text: file.new_text,
4532 is_binary: file.is_binary,
4533 })
4534 })
4535 .collect::<Result<Vec<_>>>()?,
4536 })
4537 }
4538 }
4539 })
4540 }
4541
4542 pub fn file_history(
4543 &mut self,
4544 path: RepoPath,
4545 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4546 self.file_history_paginated(path, 0, None)
4547 }
4548
4549 pub fn file_history_paginated(
4550 &mut self,
4551 path: RepoPath,
4552 skip: usize,
4553 limit: Option<usize>,
4554 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4555 let id = self.id;
4556 self.send_job(None, move |git_repo, _cx| async move {
4557 match git_repo {
4558 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4559 backend.file_history_paginated(path, skip, limit).await
4560 }
4561 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4562 let response = client
4563 .request(proto::GitFileHistory {
4564 project_id: project_id.0,
4565 repository_id: id.to_proto(),
4566 path: path.to_proto(),
4567 skip: skip as u64,
4568 limit: limit.map(|l| l as u64),
4569 })
4570 .await?;
4571 Ok(git::repository::FileHistory {
4572 entries: response
4573 .entries
4574 .into_iter()
4575 .map(|entry| git::repository::FileHistoryEntry {
4576 sha: entry.sha.into(),
4577 subject: entry.subject.into(),
4578 message: entry.message.into(),
4579 commit_timestamp: entry.commit_timestamp,
4580 author_name: entry.author_name.into(),
4581 author_email: entry.author_email.into(),
4582 })
4583 .collect(),
4584 path: RepoPath::from_proto(&response.path)?,
4585 })
4586 }
4587 }
4588 })
4589 }
4590
4591 pub fn get_graph_data(
4592 &self,
4593 log_source: LogSource,
4594 log_order: LogOrder,
4595 ) -> Option<&InitialGitGraphData> {
4596 self.initial_graph_data.get(&(log_source, log_order))
4597 }
4598
4599 pub fn search_commits(
4600 &mut self,
4601 log_source: LogSource,
4602 search_args: SearchCommitArgs,
4603 request_tx: smol::channel::Sender<Oid>,
4604 cx: &mut Context<Self>,
4605 ) {
4606 let repository_state = self.repository_state.clone();
4607
4608 cx.background_spawn(async move {
4609 let repo_state = repository_state.await;
4610
4611 match repo_state {
4612 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4613 backend
4614 .search_commits(log_source, search_args, request_tx)
4615 .await
4616 .log_err();
4617 }
4618 Ok(RepositoryState::Remote(_)) => {}
4619 Err(_) => {}
4620 };
4621 })
4622 .detach();
4623 }
4624
    /// Returns the slice of graph commits covering `range` for the given log
    /// source/order, kicking off a background fetch on first access.
    ///
    /// The first call for a `(log_source, log_order)` key inserts an
    /// `InitialGitGraphData` whose `fetch_task` streams commits in; until it
    /// finishes, `is_loading` is true and the slice may be partial. The range
    /// is clamped to the currently loaded commits, so slicing cannot panic.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // The fetch task writes back into this same map entry; if the
                // entry is removed, the task (owned by it) is dropped too.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record any failure on the entry so the UI can show it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to what has loaded so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4694
    /// Streams the initial commit-graph data for a local repository into the
    /// corresponding `initial_graph_data` entry, emitting `GraphEvent`
    /// notifications as batches arrive.
    ///
    /// Runs until the backend closes the channel, then propagates any error
    /// the backend reported.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // The backend produces batches on a background thread and sends them
        // through `request_tx`; the loop below consumes them.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                // Only append to an existing entry — never create one here;
                // this task is owned by that entry (see `graph_data`).
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Surface any error from the backend task after the stream closes.
        task.await?;
        Ok(())
    }
4748
    /// Returns the (possibly still loading) commit data for `sha`, queuing a
    /// load request if the data isn't cached yet.
    ///
    /// Behavior when the data is missing depends on the background handler:
    /// - `Open`: the sha is sent to the handler and marked `Loading` (only
    ///   if the channel accepted the request).
    /// - `Closed`: the handler is restarted; note the sha is NOT queued on
    ///   this call — presumably callers poll again once it is `Open`.
    ///   TODO(review): confirm callers re-invoke after the handler opens.
    /// - `Starting`: nothing to do; startup is already in progress.
    ///
    /// Always reports `Loading` until the data has arrived.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4769
    /// Starts the background commit-data handler: a background task that
    /// reads commit data for requested shas, and a foreground task that
    /// writes the results into `commit_data` and notifies observers.
    ///
    /// The background side shuts itself down after 10 seconds without a
    /// request; the foreground side then flips the handler state back to
    /// `Closed` so a later `fetch_commit_data` call can restart it.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Results flow background -> foreground (bounded for backpressure);
        // requests flow foreground -> background.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                // The repository entity was dropped; stop consuming results.
                if result.is_err() {
                    break;
                }
            }

            // The result channel closed (background exit or entity drop):
            // mark the handler closed so it can be reopened on demand.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: exit if no request arrives within 10s.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Failed reads are logged and skipped; the sha
                                // stays in the Loading state.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task finish
            // and mark the handler Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4860
4861 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4862 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4863 }
4864
4865 fn save_buffers<'a>(
4866 &self,
4867 entries: impl IntoIterator<Item = &'a RepoPath>,
4868 cx: &mut Context<Self>,
4869 ) -> Vec<Task<anyhow::Result<()>>> {
4870 let mut save_futures = Vec::new();
4871 if let Some(buffer_store) = self.buffer_store(cx) {
4872 buffer_store.update(cx, |buffer_store, cx| {
4873 for path in entries {
4874 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4875 continue;
4876 };
4877 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4878 && buffer
4879 .read(cx)
4880 .file()
4881 .is_some_and(|file| file.disk_state().exists())
4882 && buffer.read(cx).has_unsaved_edits()
4883 {
4884 save_futures.push(buffer_store.save_buffer(buffer, cx));
4885 }
4886 }
4887 })
4888 }
4889 save_futures
4890 }
4891
4892 pub fn stage_entries(
4893 &mut self,
4894 entries: Vec<RepoPath>,
4895 cx: &mut Context<Self>,
4896 ) -> Task<anyhow::Result<()>> {
4897 self.stage_or_unstage_entries(true, entries, cx)
4898 }
4899
4900 pub fn unstage_entries(
4901 &mut self,
4902 entries: Vec<RepoPath>,
4903 cx: &mut Context<Self>,
4904 ) -> Task<anyhow::Result<()>> {
4905 self.stage_or_unstage_entries(false, entries, cx)
4906 }
4907
    /// Stages (`stage == true`) or unstages the given entries, after first
    /// saving any dirty buffers for those paths.
    ///
    /// Before writing the index, every affected buffer's uncommitted diff is
    /// optimistically updated (all hunks marked staged/unstaged) and its
    /// `hunk_staging_operation_count` bumped; after the write, the counts are
    /// committed on success or the pending hunks cleared on failure. The job
    /// is keyed by the path set so index writes for the same paths serialize.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Persist buffer contents first so the index write sees them.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip all hunks in each affected
                            // buffer's uncommitted diff, recording the new
                            // operation count per diff state.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Commit the optimistic updates on success, or
                            // roll back the pending hunks on failure.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5092
5093 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5094 let snapshot = self.snapshot.clone();
5095 let pending_ops = self.pending_ops.clone();
5096 let to_stage = cx.background_spawn(async move {
5097 snapshot
5098 .status()
5099 .filter_map(|entry| {
5100 if let Some(ops) =
5101 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5102 {
5103 if ops.staging() || ops.staged() {
5104 None
5105 } else {
5106 Some(entry.repo_path)
5107 }
5108 } else if entry.status.staging().is_fully_staged() {
5109 None
5110 } else {
5111 Some(entry.repo_path)
5112 }
5113 })
5114 .collect()
5115 });
5116
5117 cx.spawn(async move |this, cx| {
5118 let to_stage = to_stage.await;
5119 this.update(cx, |this, cx| {
5120 this.stage_or_unstage_entries(true, to_stage, cx)
5121 })?
5122 .await
5123 })
5124 }
5125
5126 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5127 let snapshot = self.snapshot.clone();
5128 let pending_ops = self.pending_ops.clone();
5129 let to_unstage = cx.background_spawn(async move {
5130 snapshot
5131 .status()
5132 .filter_map(|entry| {
5133 if let Some(ops) =
5134 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5135 {
5136 if !ops.staging() && !ops.staged() {
5137 None
5138 } else {
5139 Some(entry.repo_path)
5140 }
5141 } else if entry.status.staging().is_fully_unstaged() {
5142 None
5143 } else {
5144 Some(entry.repo_path)
5145 }
5146 })
5147 .collect()
5148 });
5149
5150 cx.spawn(async move |this, cx| {
5151 let to_unstage = to_unstage.await;
5152 this.update(cx, |this, cx| {
5153 this.stage_or_unstage_entries(false, to_unstage, cx)
5154 })?
5155 .await
5156 })
5157 }
5158
5159 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5160 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5161
5162 self.stash_entries(to_stash, cx)
5163 }
5164
5165 pub fn stash_entries(
5166 &mut self,
5167 entries: Vec<RepoPath>,
5168 cx: &mut Context<Self>,
5169 ) -> Task<anyhow::Result<()>> {
5170 let id = self.id;
5171
5172 cx.spawn(async move |this, cx| {
5173 this.update(cx, |this, _| {
5174 this.send_job(None, move |git_repo, _cx| async move {
5175 match git_repo {
5176 RepositoryState::Local(LocalRepositoryState {
5177 backend,
5178 environment,
5179 ..
5180 }) => backend.stash_paths(entries, environment).await,
5181 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5182 client
5183 .request(proto::Stash {
5184 project_id: project_id.0,
5185 repository_id: id.to_proto(),
5186 paths: entries
5187 .into_iter()
5188 .map(|repo_path| repo_path.to_proto())
5189 .collect(),
5190 })
5191 .await?;
5192 Ok(())
5193 }
5194 }
5195 })
5196 })?
5197 .await??;
5198 Ok(())
5199 })
5200 }
5201
5202 pub fn stash_pop(
5203 &mut self,
5204 index: Option<usize>,
5205 cx: &mut Context<Self>,
5206 ) -> Task<anyhow::Result<()>> {
5207 let id = self.id;
5208 cx.spawn(async move |this, cx| {
5209 this.update(cx, |this, _| {
5210 this.send_job(None, move |git_repo, _cx| async move {
5211 match git_repo {
5212 RepositoryState::Local(LocalRepositoryState {
5213 backend,
5214 environment,
5215 ..
5216 }) => backend.stash_pop(index, environment).await,
5217 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5218 client
5219 .request(proto::StashPop {
5220 project_id: project_id.0,
5221 repository_id: id.to_proto(),
5222 stash_index: index.map(|i| i as u64),
5223 })
5224 .await
5225 .context("sending stash pop request")?;
5226 Ok(())
5227 }
5228 }
5229 })
5230 })?
5231 .await??;
5232 Ok(())
5233 })
5234 }
5235
5236 pub fn stash_apply(
5237 &mut self,
5238 index: Option<usize>,
5239 cx: &mut Context<Self>,
5240 ) -> Task<anyhow::Result<()>> {
5241 let id = self.id;
5242 cx.spawn(async move |this, cx| {
5243 this.update(cx, |this, _| {
5244 this.send_job(None, move |git_repo, _cx| async move {
5245 match git_repo {
5246 RepositoryState::Local(LocalRepositoryState {
5247 backend,
5248 environment,
5249 ..
5250 }) => backend.stash_apply(index, environment).await,
5251 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5252 client
5253 .request(proto::StashApply {
5254 project_id: project_id.0,
5255 repository_id: id.to_proto(),
5256 stash_index: index.map(|i| i as u64),
5257 })
5258 .await
5259 .context("sending stash apply request")?;
5260 Ok(())
5261 }
5262 }
5263 })
5264 })?
5265 .await??;
5266 Ok(())
5267 })
5268 }
5269
5270 pub fn stash_drop(
5271 &mut self,
5272 index: Option<usize>,
5273 cx: &mut Context<Self>,
5274 ) -> oneshot::Receiver<anyhow::Result<()>> {
5275 let id = self.id;
5276 let updates_tx = self
5277 .git_store()
5278 .and_then(|git_store| match &git_store.read(cx).state {
5279 GitStoreState::Local { downstream, .. } => downstream
5280 .as_ref()
5281 .map(|downstream| downstream.updates_tx.clone()),
5282 _ => None,
5283 });
5284 let this = cx.weak_entity();
5285 self.send_job(None, move |git_repo, mut cx| async move {
5286 match git_repo {
5287 RepositoryState::Local(LocalRepositoryState {
5288 backend,
5289 environment,
5290 ..
5291 }) => {
5292 // TODO would be nice to not have to do this manually
5293 let result = backend.stash_drop(index, environment).await;
5294 if result.is_ok()
5295 && let Ok(stash_entries) = backend.stash_entries().await
5296 {
5297 let snapshot = this.update(&mut cx, |this, cx| {
5298 this.snapshot.stash_entries = stash_entries;
5299 cx.emit(RepositoryEvent::StashEntriesChanged);
5300 this.snapshot.clone()
5301 })?;
5302 if let Some(updates_tx) = updates_tx {
5303 updates_tx
5304 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5305 .ok();
5306 }
5307 }
5308
5309 result
5310 }
5311 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5312 client
5313 .request(proto::StashDrop {
5314 project_id: project_id.0,
5315 repository_id: id.to_proto(),
5316 stash_index: index.map(|i| i as u64),
5317 })
5318 .await
5319 .context("sending stash pop request")?;
5320 Ok(())
5321 }
5322 }
5323 })
5324 }
5325
5326 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5327 let id = self.id;
5328 self.send_job(
5329 Some(format!("git hook {}", hook.as_str()).into()),
5330 move |git_repo, _cx| async move {
5331 match git_repo {
5332 RepositoryState::Local(LocalRepositoryState {
5333 backend,
5334 environment,
5335 ..
5336 }) => backend.run_hook(hook, environment.clone()).await,
5337 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5338 client
5339 .request(proto::RunGitHook {
5340 project_id: project_id.0,
5341 repository_id: id.to_proto(),
5342 hook: hook.to_proto(),
5343 })
5344 .await?;
5345
5346 Ok(())
5347 }
5348 }
5349 },
5350 )
5351 }
5352
    /// Creates a commit with the given message and options.
    ///
    /// The pre-commit hook is started first and awaited inside the queued job,
    /// so a hook failure aborts the commit. `name_and_email`, when provided,
    /// overrides the commit author. On a remote repository the request is
    /// forwarded to the host with the `askpass` delegate registered so that
    /// credential/signing prompts are routed back to this client.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Kick off the pre-commit hook now; its result is awaited below so the
        // hook and the commit run in order on the job queue.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort if the pre-commit hook failed (or its channel was dropped).
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Register the askpass delegate for the duration of the
                    // request; the defer guarantees deregistration on any exit.
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5407
5408 pub fn fetch(
5409 &mut self,
5410 fetch_options: FetchOptions,
5411 askpass: AskPassDelegate,
5412 _cx: &mut App,
5413 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5414 let askpass_delegates = self.askpass_delegates.clone();
5415 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5416 let id = self.id;
5417
5418 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
5419 match git_repo {
5420 RepositoryState::Local(LocalRepositoryState {
5421 backend,
5422 environment,
5423 ..
5424 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
5425 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5426 askpass_delegates.lock().insert(askpass_id, askpass);
5427 let _defer = util::defer(|| {
5428 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5429 debug_assert!(askpass_delegate.is_some());
5430 });
5431
5432 let response = client
5433 .request(proto::Fetch {
5434 project_id: project_id.0,
5435 repository_id: id.to_proto(),
5436 askpass_id,
5437 remote: fetch_options.to_proto(),
5438 })
5439 .await?;
5440
5441 Ok(RemoteCommandOutput {
5442 stdout: response.stdout,
5443 stderr: response.stderr,
5444 })
5445 }
5446 }
5447 })
5448 }
5449
    /// Pushes `branch` to `remote_branch` on `remote`.
    ///
    /// After a successful local push, the branch list is re-read so the
    /// snapshot's head branch (including its upstream tracking state) reflects
    /// the push, and the refreshed snapshot is forwarded to collaborators when
    /// the project is shared. Remote repositories forward a `Push` request to
    /// the host with the `askpass` delegate registered for credential prompts.
    pub fn push(
        &mut self,
        branch: SharedString,
        remote_branch: SharedString,
        remote: SharedString,
        options: Option<PushOptions>,
        askpass: AskPassDelegate,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Used only in the status label below; the actual option is passed
        // through to the backend / proto request untranslated.
        let args = options
            .map(|option| match option {
                PushOptions::SetUpstream => " --set-upstream",
                PushOptions::Force => " --force-with-lease",
            })
            .unwrap_or("");

        // Downstream update channel exists only for shared local projects.
        let updates_tx = self
            .git_store()
            .and_then(|git_store| match &git_store.read(cx).state {
                GitStoreState::Local { downstream, .. } => downstream
                    .as_ref()
                    .map(|downstream| downstream.updates_tx.clone()),
                _ => None,
            });

        let this = cx.weak_entity();
        self.send_job(
            Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
            move |git_repo, mut cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => {
                        let result = backend
                            .push(
                                branch.to_string(),
                                remote_branch.to_string(),
                                remote.to_string(),
                                options,
                                askpass,
                                environment.clone(),
                                cx.clone(),
                            )
                            .await;
                        // TODO would be nice to not have to do this manually
                        if result.is_ok() {
                            // Re-read branches so the snapshot picks up the new
                            // upstream tracking state after the push.
                            let branches = backend.branches().await?;
                            let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after scan is {branch:?}");
                            let snapshot = this.update(&mut cx, |this, cx| {
                                this.snapshot.branch = branch;
                                cx.emit(RepositoryEvent::BranchChanged);
                                this.snapshot.clone()
                            })?;
                            if let Some(updates_tx) = updates_tx {
                                updates_tx
                                    .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                                    .ok();
                            }
                        }
                        result
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Register the askpass delegate for the duration of the
                        // request; the defer guarantees deregistration.
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Push {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_branch_name: remote_branch.to_string(),
                                remote_name: remote.to_string(),
                                options: options.map(|options| match options {
                                    PushOptions::Force => proto::push::PushOptions::Force,
                                    PushOptions::SetUpstream => {
                                        proto::push::PushOptions::SetUpstream
                                    }
                                }
                                    as i32),
                            })
                            .await?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }
5551
5552 pub fn pull(
5553 &mut self,
5554 branch: Option<SharedString>,
5555 remote: SharedString,
5556 rebase: bool,
5557 askpass: AskPassDelegate,
5558 _cx: &mut App,
5559 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5560 let askpass_delegates = self.askpass_delegates.clone();
5561 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5562 let id = self.id;
5563
5564 let mut status = "git pull".to_string();
5565 if rebase {
5566 status.push_str(" --rebase");
5567 }
5568 status.push_str(&format!(" {}", remote));
5569 if let Some(b) = &branch {
5570 status.push_str(&format!(" {}", b));
5571 }
5572
5573 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5574 match git_repo {
5575 RepositoryState::Local(LocalRepositoryState {
5576 backend,
5577 environment,
5578 ..
5579 }) => {
5580 backend
5581 .pull(
5582 branch.as_ref().map(|b| b.to_string()),
5583 remote.to_string(),
5584 rebase,
5585 askpass,
5586 environment.clone(),
5587 cx,
5588 )
5589 .await
5590 }
5591 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5592 askpass_delegates.lock().insert(askpass_id, askpass);
5593 let _defer = util::defer(|| {
5594 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5595 debug_assert!(askpass_delegate.is_some());
5596 });
5597 let response = client
5598 .request(proto::Pull {
5599 project_id: project_id.0,
5600 repository_id: id.to_proto(),
5601 askpass_id,
5602 rebase,
5603 branch_name: branch.as_ref().map(|b| b.to_string()),
5604 remote_name: remote.to_string(),
5605 })
5606 .await?;
5607
5608 Ok(RemoteCommandOutput {
5609 stdout: response.stdout,
5610 stderr: response.stderr,
5611 })
5612 }
5613 }
5614 })
5615 }
5616
    /// Queues a job that writes `content` into the git index for `path`
    /// (removing the index entry when `content` is `None` — TODO confirm the
    /// backend's semantics for `None`).
    ///
    /// Jobs are keyed on the path so multiple writes to the same file are
    /// coalesced. After the write, if `hunk_staging_operation_count` was
    /// provided, the corresponding buffer's diff state is stamped with it so
    /// later index events can be correlated with this write.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit from the working copy;
                        // missing/unreadable files are treated as non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Find the open buffer for this repo path (if any) and
                    // record the staging-operation count as of this write.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5695
5696 pub fn create_remote(
5697 &mut self,
5698 remote_name: String,
5699 remote_url: String,
5700 ) -> oneshot::Receiver<Result<()>> {
5701 let id = self.id;
5702 self.send_job(
5703 Some(format!("git remote add {remote_name} {remote_url}").into()),
5704 move |repo, _cx| async move {
5705 match repo {
5706 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5707 backend.create_remote(remote_name, remote_url).await
5708 }
5709 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5710 client
5711 .request(proto::GitCreateRemote {
5712 project_id: project_id.0,
5713 repository_id: id.to_proto(),
5714 remote_name,
5715 remote_url,
5716 })
5717 .await?;
5718
5719 Ok(())
5720 }
5721 }
5722 },
5723 )
5724 }
5725
5726 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5727 let id = self.id;
5728 self.send_job(
5729 Some(format!("git remove remote {remote_name}").into()),
5730 move |repo, _cx| async move {
5731 match repo {
5732 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5733 backend.remove_remote(remote_name).await
5734 }
5735 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5736 client
5737 .request(proto::GitRemoveRemote {
5738 project_id: project_id.0,
5739 repository_id: id.to_proto(),
5740 remote_name,
5741 })
5742 .await?;
5743
5744 Ok(())
5745 }
5746 }
5747 },
5748 )
5749 }
5750
5751 pub fn get_remotes(
5752 &mut self,
5753 branch_name: Option<String>,
5754 is_push: bool,
5755 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5756 let id = self.id;
5757 self.send_job(None, move |repo, _cx| async move {
5758 match repo {
5759 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5760 let remote = if let Some(branch_name) = branch_name {
5761 if is_push {
5762 backend.get_push_remote(branch_name).await?
5763 } else {
5764 backend.get_branch_remote(branch_name).await?
5765 }
5766 } else {
5767 None
5768 };
5769
5770 match remote {
5771 Some(remote) => Ok(vec![remote]),
5772 None => backend.get_all_remotes().await,
5773 }
5774 }
5775 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5776 let response = client
5777 .request(proto::GetRemotes {
5778 project_id: project_id.0,
5779 repository_id: id.to_proto(),
5780 branch_name,
5781 is_push,
5782 })
5783 .await?;
5784
5785 let remotes = response
5786 .remotes
5787 .into_iter()
5788 .map(|remotes| Remote {
5789 name: remotes.name.into(),
5790 })
5791 .collect();
5792
5793 Ok(remotes)
5794 }
5795 }
5796 })
5797 }
5798
5799 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5800 let id = self.id;
5801 self.send_job(None, move |repo, _| async move {
5802 match repo {
5803 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5804 backend.branches().await
5805 }
5806 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5807 let response = client
5808 .request(proto::GitGetBranches {
5809 project_id: project_id.0,
5810 repository_id: id.to_proto(),
5811 })
5812 .await?;
5813
5814 let branches = response
5815 .branches
5816 .into_iter()
5817 .map(|branch| proto_to_branch(&branch))
5818 .collect();
5819
5820 Ok(branches)
5821 }
5822 }
5823 })
5824 }
5825
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the absolute path of the linked worktree's working directory.
    ///
    /// Returns `None` if this is the main checkout, i.e. when the working
    /// directory path equals the original repository path.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5837
5838 pub fn path_for_new_linked_worktree(
5839 &self,
5840 branch_name: &str,
5841 worktree_directory_setting: &str,
5842 ) -> Result<PathBuf> {
5843 let original_repo = self.original_repo_abs_path.clone();
5844 let project_name = original_repo
5845 .file_name()
5846 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5847 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5848 Ok(directory.join(branch_name).join(project_name))
5849 }
5850
5851 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5852 let id = self.id;
5853 self.send_job(None, move |repo, _| async move {
5854 match repo {
5855 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5856 backend.worktrees().await
5857 }
5858 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5859 let response = client
5860 .request(proto::GitGetWorktrees {
5861 project_id: project_id.0,
5862 repository_id: id.to_proto(),
5863 })
5864 .await?;
5865
5866 let worktrees = response
5867 .worktrees
5868 .into_iter()
5869 .map(|worktree| proto_to_worktree(&worktree))
5870 .collect();
5871
5872 Ok(worktrees)
5873 }
5874 }
5875 })
5876 }
5877
5878 pub fn create_worktree(
5879 &mut self,
5880 branch_name: String,
5881 path: PathBuf,
5882 commit: Option<String>,
5883 ) -> oneshot::Receiver<Result<()>> {
5884 let id = self.id;
5885 self.send_job(
5886 Some(format!("git worktree add: {}", branch_name).into()),
5887 move |repo, _cx| async move {
5888 match repo {
5889 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5890 backend.create_worktree(branch_name, path, commit).await
5891 }
5892 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5893 client
5894 .request(proto::GitCreateWorktree {
5895 project_id: project_id.0,
5896 repository_id: id.to_proto(),
5897 name: branch_name,
5898 directory: path.to_string_lossy().to_string(),
5899 commit,
5900 })
5901 .await?;
5902
5903 Ok(())
5904 }
5905 }
5906 },
5907 )
5908 }
5909
5910 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5911 let id = self.id;
5912 self.send_job(
5913 Some(format!("git worktree remove: {}", path.display()).into()),
5914 move |repo, _cx| async move {
5915 match repo {
5916 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5917 backend.remove_worktree(path, force).await
5918 }
5919 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5920 client
5921 .request(proto::GitRemoveWorktree {
5922 project_id: project_id.0,
5923 repository_id: id.to_proto(),
5924 path: path.to_string_lossy().to_string(),
5925 force,
5926 })
5927 .await?;
5928
5929 Ok(())
5930 }
5931 }
5932 },
5933 )
5934 }
5935
5936 pub fn rename_worktree(
5937 &mut self,
5938 old_path: PathBuf,
5939 new_path: PathBuf,
5940 ) -> oneshot::Receiver<Result<()>> {
5941 let id = self.id;
5942 self.send_job(
5943 Some(format!("git worktree move: {}", old_path.display()).into()),
5944 move |repo, _cx| async move {
5945 match repo {
5946 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5947 backend.rename_worktree(old_path, new_path).await
5948 }
5949 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5950 client
5951 .request(proto::GitRenameWorktree {
5952 project_id: project_id.0,
5953 repository_id: id.to_proto(),
5954 old_path: old_path.to_string_lossy().to_string(),
5955 new_path: new_path.to_string_lossy().to_string(),
5956 })
5957 .await?;
5958
5959 Ok(())
5960 }
5961 }
5962 },
5963 )
5964 }
5965
5966 pub fn default_branch(
5967 &mut self,
5968 include_remote_name: bool,
5969 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5970 let id = self.id;
5971 self.send_job(None, move |repo, _| async move {
5972 match repo {
5973 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5974 backend.default_branch(include_remote_name).await
5975 }
5976 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5977 let response = client
5978 .request(proto::GetDefaultBranch {
5979 project_id: project_id.0,
5980 repository_id: id.to_proto(),
5981 })
5982 .await?;
5983
5984 anyhow::Ok(response.branch.map(SharedString::from))
5985 }
5986 }
5987 })
5988 }
5989
    /// Computes a tree-level diff (per-path statuses, no hunks) for the given
    /// `diff_type`, locally or via a `GetTreeDiff` request to the host.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Convert proto entries back into `TreeDiffStatus`. Entries
                    // with a missing/unparsable oid or path are logged and
                    // silently dropped (`log_err` + `filter_map`).
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6049
6050 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6051 let id = self.id;
6052 self.send_job(None, move |repo, _cx| async move {
6053 match repo {
6054 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6055 backend.diff(diff_type).await
6056 }
6057 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6058 let (proto_diff_type, merge_base_ref) = match &diff_type {
6059 DiffType::HeadToIndex => {
6060 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6061 }
6062 DiffType::HeadToWorktree => {
6063 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6064 }
6065 DiffType::MergeBase { base_ref } => (
6066 proto::git_diff::DiffType::MergeBase.into(),
6067 Some(base_ref.to_string()),
6068 ),
6069 };
6070 let response = client
6071 .request(proto::GitDiff {
6072 project_id: project_id.0,
6073 repository_id: id.to_proto(),
6074 diff_type: proto_diff_type,
6075 merge_base_ref,
6076 })
6077 .await?;
6078
6079 Ok(response.diff)
6080 }
6081 }
6082 })
6083 }
6084
    /// Creates a new branch named `branch_name` and switches to it,
    /// optionally starting from `base_branch` instead of the current HEAD.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        // Status label mirrors the equivalent git invocation.
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `base_branch` is not forwarded here, so a
                    // remote create always branches from HEAD — confirm whether
                    // proto::GitCreateBranch can carry a base ref.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6115
6116 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6117 let id = self.id;
6118 self.send_job(
6119 Some(format!("git switch {branch_name}").into()),
6120 move |repo, _cx| async move {
6121 match repo {
6122 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6123 backend.change_branch(branch_name).await
6124 }
6125 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6126 client
6127 .request(proto::GitChangeBranch {
6128 project_id: project_id.0,
6129 repository_id: id.to_proto(),
6130 branch_name,
6131 })
6132 .await?;
6133
6134 Ok(())
6135 }
6136 }
6137 },
6138 )
6139 }
6140
6141 pub fn delete_branch(
6142 &mut self,
6143 is_remote: bool,
6144 branch_name: String,
6145 ) -> oneshot::Receiver<Result<()>> {
6146 let id = self.id;
6147 self.send_job(
6148 Some(
6149 format!(
6150 "git branch {} {}",
6151 if is_remote { "-dr" } else { "-d" },
6152 branch_name
6153 )
6154 .into(),
6155 ),
6156 move |repo, _cx| async move {
6157 match repo {
6158 RepositoryState::Local(state) => {
6159 state.backend.delete_branch(is_remote, branch_name).await
6160 }
6161 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6162 client
6163 .request(proto::GitDeleteBranch {
6164 project_id: project_id.0,
6165 repository_id: id.to_proto(),
6166 is_remote,
6167 branch_name,
6168 })
6169 .await?;
6170
6171 Ok(())
6172 }
6173 }
6174 },
6175 )
6176 }
6177
6178 pub fn rename_branch(
6179 &mut self,
6180 branch: String,
6181 new_name: String,
6182 ) -> oneshot::Receiver<Result<()>> {
6183 let id = self.id;
6184 self.send_job(
6185 Some(format!("git branch -m {branch} {new_name}").into()),
6186 move |repo, _cx| async move {
6187 match repo {
6188 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6189 backend.rename_branch(branch, new_name).await
6190 }
6191 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6192 client
6193 .request(proto::GitRenameBranch {
6194 project_id: project_id.0,
6195 repository_id: id.to_proto(),
6196 branch,
6197 new_name,
6198 })
6199 .await?;
6200
6201 Ok(())
6202 }
6203 }
6204 },
6205 )
6206 }
6207
6208 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6209 let id = self.id;
6210 self.send_job(None, move |repo, _cx| async move {
6211 match repo {
6212 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6213 backend.check_for_pushed_commit().await
6214 }
6215 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6216 let response = client
6217 .request(proto::CheckForPushedCommits {
6218 project_id: project_id.0,
6219 repository_id: id.to_proto(),
6220 })
6221 .await?;
6222
6223 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6224
6225 Ok(branches)
6226 }
6227 }
6228 })
6229 }
6230
6231 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6232 self.send_job(None, |repo, _cx| async move {
6233 match repo {
6234 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6235 backend.checkpoint().await
6236 }
6237 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6238 }
6239 })
6240 }
6241
6242 pub fn restore_checkpoint(
6243 &mut self,
6244 checkpoint: GitRepositoryCheckpoint,
6245 ) -> oneshot::Receiver<Result<()>> {
6246 self.send_job(None, move |repo, _cx| async move {
6247 match repo {
6248 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6249 backend.restore_checkpoint(checkpoint).await
6250 }
6251 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6252 }
6253 })
6254 }
6255
    /// Applies an `UpdateRepository` message from the upstream (host) project
    /// to this downstream replica's snapshot, emitting the corresponding
    /// repository events for any fields that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch / head commit: emit BranchChanged only when either differs.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash list: malformed proto entries are silently skipped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Statuses arrive as a delta: removals plus upserts, applied as a
        // single batch of sum-tree edits. Unparsable paths/statuses are
        // logged and dropped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only bump the scan id once the upstream says the batch is complete.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6339
6340 pub fn compare_checkpoints(
6341 &mut self,
6342 left: GitRepositoryCheckpoint,
6343 right: GitRepositoryCheckpoint,
6344 ) -> oneshot::Receiver<Result<bool>> {
6345 self.send_job(None, move |repo, _cx| async move {
6346 match repo {
6347 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6348 backend.compare_checkpoints(left, right).await
6349 }
6350 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6351 }
6352 })
6353 }
6354
6355 pub fn diff_checkpoints(
6356 &mut self,
6357 base_checkpoint: GitRepositoryCheckpoint,
6358 target_checkpoint: GitRepositoryCheckpoint,
6359 ) -> oneshot::Receiver<Result<String>> {
6360 self.send_job(None, move |repo, _cx| async move {
6361 match repo {
6362 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6363 backend
6364 .diff_checkpoints(base_checkpoint, target_checkpoint)
6365 .await
6366 }
6367 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6368 }
6369 })
6370 }
6371
    /// Drops pending ops that are no longer running, keeping only in-flight
    /// ones, and emits `PendingOpsChanged` when anything was removed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree keeping only ops that are still running; entries
        // whose ops have all finished are removed entirely.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event payload carries the *previous* ops set
            // (the assignment below happens after the emit) — confirm whether
            // listeners expect the old set or the filtered `updated` one.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6397
    /// Queues a full git status rescan, keyed on `GitJobKey::ReloadGitState`
    /// so redundant scans queued behind a newer one are skipped by the worker.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        // The result receiver is intentionally dropped; the scan's effects are
        // observed through entity updates and the downstream channel.
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // Repository entity already dropped: nothing to update.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                // Forward the fresh snapshot to collaborators when shared.
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6429
    /// Spawns the background worker that executes git jobs for a local
    /// repository, returning the channel on which jobs are submitted.
    ///
    /// Jobs run sequentially. A keyed job is skipped when a newer job with the
    /// same key is already queued behind it, so only the latest of a run of
    /// duplicates executes.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // processing any jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so keyed duplicates can be
                // detected before running the next job.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a later job with the same key is
                    // queued — only the most recent one needs to run.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped: shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6475
    /// Spawns the background worker that executes git jobs against a remote
    /// (host-side) repository, returning the job submission channel.
    ///
    /// Same sequential loop and keyed-job coalescing as the local worker, but
    /// without backend initialization or hosting-provider registration.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so keyed duplicates can be
                // detected before running the next job.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a later job with the same key is
                    // queued — only the most recent one needs to run.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped: shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6511
6512 fn load_staged_text(
6513 &mut self,
6514 buffer_id: BufferId,
6515 repo_path: RepoPath,
6516 cx: &App,
6517 ) -> Task<Result<Option<String>>> {
6518 let rx = self.send_job(None, move |state, _| async move {
6519 match state {
6520 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6521 anyhow::Ok(backend.load_index_text(repo_path).await)
6522 }
6523 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6524 let response = client
6525 .request(proto::OpenUnstagedDiff {
6526 project_id: project_id.to_proto(),
6527 buffer_id: buffer_id.to_proto(),
6528 })
6529 .await?;
6530 Ok(response.staged_text)
6531 }
6532 }
6533 });
6534 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6535 }
6536
6537 fn load_committed_text(
6538 &mut self,
6539 buffer_id: BufferId,
6540 repo_path: RepoPath,
6541 cx: &App,
6542 ) -> Task<Result<DiffBasesChange>> {
6543 let rx = self.send_job(None, move |state, _| async move {
6544 match state {
6545 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6546 let committed_text = backend.load_committed_text(repo_path.clone()).await;
6547 let staged_text = backend.load_index_text(repo_path).await;
6548 let diff_bases_change = if committed_text == staged_text {
6549 DiffBasesChange::SetBoth(committed_text)
6550 } else {
6551 DiffBasesChange::SetEach {
6552 index: staged_text,
6553 head: committed_text,
6554 }
6555 };
6556 anyhow::Ok(diff_bases_change)
6557 }
6558 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6559 use proto::open_uncommitted_diff_response::Mode;
6560
6561 let response = client
6562 .request(proto::OpenUncommittedDiff {
6563 project_id: project_id.to_proto(),
6564 buffer_id: buffer_id.to_proto(),
6565 })
6566 .await?;
6567 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
6568 let bases = match mode {
6569 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
6570 Mode::IndexAndHead => DiffBasesChange::SetEach {
6571 head: response.committed_text,
6572 index: response.staged_text,
6573 },
6574 };
6575 Ok(bases)
6576 }
6577 }
6578 });
6579
6580 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6581 }
6582
6583 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6584 let repository_id = self.snapshot.id;
6585 let rx = self.send_job(None, move |state, _| async move {
6586 match state {
6587 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6588 backend.load_blob_content(oid).await
6589 }
6590 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6591 let response = client
6592 .request(proto::GetBlobContent {
6593 project_id: project_id.to_proto(),
6594 repository_id: repository_id.0,
6595 oid: oid.to_string(),
6596 })
6597 .await?;
6598 Ok(response.content)
6599 }
6600 }
6601 });
6602 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6603 }
6604
    /// Records `paths` as needing a status refresh and enqueues a keyed
    /// `RefreshStatuses` job that recomputes their git status and diff stats.
    ///
    /// Because the job is keyed, repeated calls coalesce: only the most
    /// recently queued refresh runs, consuming all accumulated path batches.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of every path batch accumulated since the
                // last refresh, plus the snapshot to diff against.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                // Diff stats need a HEAD commit to diff against.
                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        // Build a minimal edit set against the previous
                        // snapshot: insert entries whose status or diff stat
                        // changed, and remove stale entries below.
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            // Skip paths whose previous entry already matches
                            // the freshly computed state.
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths we asked about that no longer appear in the
                        // status output get their stale entries removed.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    // Forward the refreshed snapshot to any downstream
                    // (remote collaboration) clients.
                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6717
6718 /// currently running git command and when it started
6719 pub fn current_job(&self) -> Option<JobInfo> {
6720 self.active_jobs.values().next().cloned()
6721 }
6722
    /// Enqueues a no-op job and returns a receiver that resolves once every
    /// git job queued before it has completed.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
6726
    /// Runs `f` while tracking a pending git operation for each of `paths`.
    ///
    /// Before the job starts, a `Running` pending op is recorded per path;
    /// once `f` resolves, each op is marked `Finished`, `Skipped` (when the
    /// job was canceled), or `Error`.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A canceled job is not an error from the caller's perspective.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                // Re-read each path's op set and update only the op created
                // above, leaving any concurrently added ops intact.
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6766
6767 fn new_pending_ops_for_paths(
6768 &mut self,
6769 paths: Vec<RepoPath>,
6770 git_status: pending_op::GitStatus,
6771 ) -> Vec<(PendingOpId, RepoPath)> {
6772 let mut edits = Vec::with_capacity(paths.len());
6773 let mut ids = Vec::with_capacity(paths.len());
6774 for path in paths {
6775 let mut ops = self
6776 .pending_ops
6777 .get(&PathKey(path.as_ref().clone()), ())
6778 .cloned()
6779 .unwrap_or_else(|| PendingOps::new(&path));
6780 let id = ops.max_id() + 1;
6781 ops.ops.push(PendingOp {
6782 id,
6783 git_status,
6784 job_status: pending_op::JobStatus::Running,
6785 });
6786 edits.push(sum_tree::Edit::Insert(ops));
6787 ids.push((id, path));
6788 }
6789 self.pending_ops.edit(edits, ());
6790 ids
6791 }
6792 pub fn default_remote_url(&self) -> Option<String> {
6793 self.remote_upstream_url
6794 .clone()
6795 .or(self.remote_origin_url.clone())
6796 }
6797}
6798
6799/// If `path` is a git linked worktree checkout, resolves it to the main
6800/// repository's working directory path. Returns `None` if `path` is a normal
6801/// repository, not a git repo, or if resolution fails.
6802///
6803/// Resolution works by:
6804/// 1. Reading the `.git` file to get the `gitdir:` pointer
6805/// 2. Following that to the worktree-specific git directory
6806/// 3. Reading the `commondir` file to find the shared `.git` directory
6807/// 4. Deriving the main repo's working directory from the common dir
6808pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
6809 let dot_git = path.join(".git");
6810 let metadata = fs.metadata(&dot_git).await.ok()??;
6811 if metadata.is_dir {
6812 return None; // Normal repo, not a linked worktree
6813 }
6814 // It's a .git file — parse the gitdir: pointer
6815 let content = fs.load(&dot_git).await.ok()?;
6816 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
6817 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
6818 // Read commondir to find the main .git directory
6819 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
6820 let common_dir = fs
6821 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
6822 .await
6823 .ok()?;
6824 Some(git::repository::original_repo_path_from_common_dir(
6825 &common_dir,
6826 ))
6827}
6828
6829/// Validates that the resolved worktree directory is acceptable:
6830/// - The setting must not be an absolute path.
6831/// - The resolved path must be either a subdirectory of the working
6832/// directory or a subdirectory of its parent (i.e., a sibling).
6833///
6834/// Returns `Ok(resolved_path)` or an error with a user-facing message.
6835pub fn worktrees_directory_for_repo(
6836 original_repo_abs_path: &Path,
6837 worktree_directory_setting: &str,
6838) -> Result<PathBuf> {
6839 // Check the original setting before trimming, since a path like "///"
6840 // is absolute but becomes "" after stripping trailing separators.
6841 // Also check for leading `/` or `\` explicitly, because on Windows
6842 // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
6843 // would slip through even though it's clearly not a relative path.
6844 if Path::new(worktree_directory_setting).is_absolute()
6845 || worktree_directory_setting.starts_with('/')
6846 || worktree_directory_setting.starts_with('\\')
6847 {
6848 anyhow::bail!(
6849 "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
6850 );
6851 }
6852
6853 if worktree_directory_setting.is_empty() {
6854 anyhow::bail!("git.worktree_directory must not be empty");
6855 }
6856
6857 let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
6858 if trimmed == ".." {
6859 anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
6860 }
6861
6862 let joined = original_repo_abs_path.join(trimmed);
6863 let resolved = util::normalize_path(&joined);
6864 let resolved = if resolved.starts_with(original_repo_abs_path) {
6865 resolved
6866 } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
6867 resolved.join(repo_dir_name)
6868 } else {
6869 resolved
6870 };
6871
6872 let parent = original_repo_abs_path
6873 .parent()
6874 .unwrap_or(original_repo_abs_path);
6875
6876 if !resolved.starts_with(parent) {
6877 anyhow::bail!(
6878 "git.worktree_directory resolved to {resolved:?}, which is outside \
6879 the project root and its parent directory. It must resolve to a \
6880 subdirectory of {original_repo_abs_path:?} or a sibling of it."
6881 );
6882 }
6883
6884 Ok(resolved)
6885}
6886
6887/// Returns a short name for a linked worktree suitable for UI display
6888///
6889/// Uses the main worktree path to come up with a short name that disambiguates
6890/// the linked worktree from the main worktree.
6891pub fn linked_worktree_short_name(
6892 main_worktree_path: &Path,
6893 linked_worktree_path: &Path,
6894) -> Option<SharedString> {
6895 if main_worktree_path == linked_worktree_path {
6896 return None;
6897 }
6898
6899 let project_name = main_worktree_path.file_name()?.to_str()?;
6900 let directory_name = linked_worktree_path.file_name()?.to_str()?;
6901 let name = if directory_name != project_name {
6902 directory_name.to_string()
6903 } else {
6904 linked_worktree_path
6905 .parent()?
6906 .file_name()?
6907 .to_str()?
6908 .to_string()
6909 };
6910 Some(name.into())
6911}
6912
/// Builds a permalink to the original repository for a file that appears to
/// live in a Cargo registry checkout (identified by a `.cargo_vcs_info.json`
/// in an ancestor directory).
///
/// The vcs-info file records the commit the package was published from and
/// its path inside the repository; combined with the `package.repository`
/// field of `Cargo.toml`, that is enough to build a line-range permalink on
/// the hosting provider.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal deserialization targets for the two manifest files read below.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to find the package root containing
    // `.cargo_vcs_info.json`.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file path at the package's location inside the repository.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
6963
6964fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6965 let Some(blame) = blame else {
6966 return proto::BlameBufferResponse {
6967 blame_response: None,
6968 };
6969 };
6970
6971 let entries = blame
6972 .entries
6973 .into_iter()
6974 .map(|entry| proto::BlameEntry {
6975 sha: entry.sha.as_bytes().into(),
6976 start_line: entry.range.start,
6977 end_line: entry.range.end,
6978 original_line_number: entry.original_line_number,
6979 author: entry.author,
6980 author_mail: entry.author_mail,
6981 author_time: entry.author_time,
6982 author_tz: entry.author_tz,
6983 committer: entry.committer_name,
6984 committer_mail: entry.committer_email,
6985 committer_time: entry.committer_time,
6986 committer_tz: entry.committer_tz,
6987 summary: entry.summary,
6988 previous: entry.previous,
6989 filename: entry.filename,
6990 })
6991 .collect::<Vec<_>>();
6992
6993 let messages = blame
6994 .messages
6995 .into_iter()
6996 .map(|(oid, message)| proto::CommitMessage {
6997 oid: oid.as_bytes().into(),
6998 message,
6999 })
7000 .collect::<Vec<_>>();
7001
7002 proto::BlameBufferResponse {
7003 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7004 }
7005}
7006
7007fn deserialize_blame_buffer_response(
7008 response: proto::BlameBufferResponse,
7009) -> Option<git::blame::Blame> {
7010 let response = response.blame_response?;
7011 let entries = response
7012 .entries
7013 .into_iter()
7014 .filter_map(|entry| {
7015 Some(git::blame::BlameEntry {
7016 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7017 range: entry.start_line..entry.end_line,
7018 original_line_number: entry.original_line_number,
7019 committer_name: entry.committer,
7020 committer_time: entry.committer_time,
7021 committer_tz: entry.committer_tz,
7022 committer_email: entry.committer_mail,
7023 author: entry.author,
7024 author_mail: entry.author_mail,
7025 author_time: entry.author_time,
7026 author_tz: entry.author_tz,
7027 summary: entry.summary,
7028 previous: entry.previous,
7029 filename: entry.filename,
7030 })
7031 })
7032 .collect::<Vec<_>>();
7033
7034 let messages = response
7035 .messages
7036 .into_iter()
7037 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7038 .collect::<HashMap<_, _>>();
7039
7040 Some(Blame { entries, messages })
7041}
7042
7043fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7044 proto::Branch {
7045 is_head: branch.is_head,
7046 ref_name: branch.ref_name.to_string(),
7047 unix_timestamp: branch
7048 .most_recent_commit
7049 .as_ref()
7050 .map(|commit| commit.commit_timestamp as u64),
7051 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7052 ref_name: upstream.ref_name.to_string(),
7053 tracking: upstream
7054 .tracking
7055 .status()
7056 .map(|upstream| proto::UpstreamTracking {
7057 ahead: upstream.ahead as u64,
7058 behind: upstream.behind as u64,
7059 }),
7060 }),
7061 most_recent_commit: branch
7062 .most_recent_commit
7063 .as_ref()
7064 .map(|commit| proto::CommitSummary {
7065 sha: commit.sha.to_string(),
7066 subject: commit.subject.to_string(),
7067 commit_timestamp: commit.commit_timestamp,
7068 author_name: commit.author_name.to_string(),
7069 }),
7070 }
7071}
7072
7073fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7074 proto::Worktree {
7075 path: worktree.path.to_string_lossy().to_string(),
7076 ref_name: worktree
7077 .ref_name
7078 .as_ref()
7079 .map(|s| s.to_string())
7080 .unwrap_or_default(),
7081 sha: worktree.sha.to_string(),
7082 is_main: worktree.is_main,
7083 }
7084}
7085
7086fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7087 git::repository::Worktree {
7088 path: PathBuf::from(proto.path.clone()),
7089 ref_name: Some(SharedString::from(&proto.ref_name)),
7090 sha: proto.sha.clone().into(),
7091 is_main: proto.is_main,
7092 }
7093}
7094
7095fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7096 git::repository::Branch {
7097 is_head: proto.is_head,
7098 ref_name: proto.ref_name.clone().into(),
7099 upstream: proto
7100 .upstream
7101 .as_ref()
7102 .map(|upstream| git::repository::Upstream {
7103 ref_name: upstream.ref_name.to_string().into(),
7104 tracking: upstream
7105 .tracking
7106 .as_ref()
7107 .map(|tracking| {
7108 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7109 ahead: tracking.ahead as u32,
7110 behind: tracking.behind as u32,
7111 })
7112 })
7113 .unwrap_or(git::repository::UpstreamTracking::Gone),
7114 }),
7115 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7116 git::repository::CommitSummary {
7117 sha: commit.sha.to_string().into(),
7118 subject: commit.subject.to_string().into(),
7119 commit_timestamp: commit.commit_timestamp,
7120 author_name: commit.author_name.to_string().into(),
7121 has_parent: true,
7122 }
7123 }),
7124 }
7125}
7126
7127fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7128 proto::GitCommitDetails {
7129 sha: commit.sha.to_string(),
7130 message: commit.message.to_string(),
7131 commit_timestamp: commit.commit_timestamp,
7132 author_email: commit.author_email.to_string(),
7133 author_name: commit.author_name.to_string(),
7134 }
7135}
7136
7137fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7138 CommitDetails {
7139 sha: proto.sha.clone().into(),
7140 message: proto.message.clone().into(),
7141 commit_timestamp: proto.commit_timestamp,
7142 author_email: proto.author_email.clone().into(),
7143 author_name: proto.author_name.clone().into(),
7144 }
7145}
7146
/// Recomputes the full [`RepositorySnapshot`] for a local repository.
///
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        // A full recompute supersedes any queued incremental refreshes.
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD to full commit details; `None` when there is no HEAD sha.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Keep only linked worktrees; the main working directory is excluded.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First snapshot update: branch/head/remotes/worktrees, so the UI can
    // react before the slower status computation below completes.
    let snapshot = this.update(cx, |this, cx| {
        let branch_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if branch_changed {
            cx.emit(RepositoryEvent::BranchChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Second stage: full status scan, diff stats (only meaningful when a HEAD
    // commit exists), and stash entries, all off the main thread.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Fold statuses and per-path diff stats into the status tree, collecting
    // conflicted paths for the merge-state update below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Final snapshot update: file statuses, merge state, and stash entries,
    // with change events emitted before the fields are overwritten.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7309
/// Decodes a git file status from its protobuf representation.
///
/// `simple_status` is a legacy single-code encoding; it is consulted only
/// when the richer `status` variant is absent.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Fall back to the legacy simple code when no detailed variant is given.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both heads with the same mapping, failing on any code
            // that is not valid for an unmerged entry.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Decode the index and worktree columns independently.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7395
7396fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7397 use proto::git_file_status::{Tracked, Unmerged, Variant};
7398
7399 let variant = match status {
7400 FileStatus::Untracked => Variant::Untracked(Default::default()),
7401 FileStatus::Ignored => Variant::Ignored(Default::default()),
7402 FileStatus::Unmerged(UnmergedStatus {
7403 first_head,
7404 second_head,
7405 }) => Variant::Unmerged(Unmerged {
7406 first_head: unmerged_status_to_proto(first_head),
7407 second_head: unmerged_status_to_proto(second_head),
7408 }),
7409 FileStatus::Tracked(TrackedStatus {
7410 index_status,
7411 worktree_status,
7412 }) => Variant::Tracked(Tracked {
7413 index_status: tracked_status_to_proto(index_status),
7414 worktree_status: tracked_status_to_proto(worktree_status),
7415 }),
7416 };
7417 proto::GitFileStatus {
7418 variant: Some(variant),
7419 }
7420}
7421
7422fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7423 match code {
7424 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7425 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7426 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7427 }
7428}
7429
7430fn tracked_status_to_proto(code: StatusCode) -> i32 {
7431 match code {
7432 StatusCode::Added => proto::GitStatus::Added as _,
7433 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7434 StatusCode::Modified => proto::GitStatus::Modified as _,
7435 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7436 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7437 StatusCode::Copied => proto::GitStatus::Copied as _,
7438 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7439 }
7440}