1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for a project's git state: tracks every detected
/// repository, per-buffer diff state, and the plumbing for sharing git
/// data with remote collaborators.
pub struct GitStore {
    /// Local vs. remote mode, plus any downstream (shared) connection.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All repositories currently known to the store, by id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// Worktrees associated with each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository most recently selected via `set_active_repo_for_path`.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff-loading tasks, deduplicated per (buffer, diff kind).
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diffs that have been shared with each remote peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Diffs for a single buffer that have been shared with a remote peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}

/// Per-buffer git state: weak handles to the diffs opened for the buffer,
/// its conflict set, and the cached base texts the diffs are computed from.
struct BufferGitState {
    /// Diff of the buffer against the index (staged) text.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    /// Diff of the buffer against the committed (HEAD) text.
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs of the buffer against specific commits, keyed by OID.
    /// A `None` key denotes a diff whose base text is empty.
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    /// Conflict-marker regions parsed from the buffer, if requested.
    conflict_set: Option<WeakEntity<ConflictSet>>,
    /// In-flight diff recalculation, if any.
    recalculate_diff_task: Option<Task<Result<()>>>,
    /// In-flight conflict-marker reparse, if any.
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders notified when the next conflict reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    /// Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    /// Cached HEAD (committed) text for the buffer's file.
    head_text: Option<Arc<str>>,
    /// Cached index (staged) text for the buffer's file.
    index_text: Option<Arc<str>>,
    /// Cached blob contents for diffs against specific commits.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts changed, and their new contents, when a
/// buffer's diffs need refreshing after a git state change.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}

/// Identifies which kind of diff is being loaded for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    Unstaged,
    Uncommitted,
    /// Diff against a specific commit; `None` means an empty base.
    SinceOid(Option<git::Oid>),
}
162
/// Whether this store operates on local repositories or proxies to an
/// upstream (remote) project, plus any downstream connection in either case.
enum GitStoreState {
    Local {
        /// Source of fresh `RepositoryId`s for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this local project is shared with guests.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        /// Client used to forward git requests to the host.
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this remote project is re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A repository change queued for forwarding to downstream clients.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Connection state while a local project is shared: updates are queued on
/// `updates_tx` and drained by `_task`, which diffs each snapshot against
/// the previously sent one and forwards protobuf updates to `client`.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}

/// A point-in-time capture of every repository's state, keyed by working
/// directory absolute path, restorable via `restore_checkpoint`.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// A single file's git status, as stored in a repository snapshot's
/// status tree.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    /// Path of the file, relative to the repository root.
    pub repo_path: RepoPath,
    pub status: FileStatus,
    /// Added/deleted line counts, when both have been computed.
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summarizes this entry for the status sum-tree: the path for ordering
    /// plus the aggregated git status.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed (and deduplicated) by repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
262
/// Unique identifier for a repository within a project.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: the merge heads recorded for each
/// conflicted path, and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Commit data for the git graph, which may still be loading.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// Immutable snapshot of a repository's observable state: statuses, branch,
/// head commit, merge state, remote URLs, stash, and linked git worktrees.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Git status for changed/tracked paths, ordered by path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    /// The currently checked-out branch, if any.
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    /// Id of the scan that produced this snapshot.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Git worktrees linked to this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
298
/// Identifier for a queued git job.
type JobId = u64;

/// Metadata about a running git job, used to surface progress in the UI.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Background handler that serves commit-data requests for the git graph;
/// OIDs to load are sent over `commit_data_request`.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph commit-data handler.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Initial commit list for the git graph, along with the task fetching it
/// and any error encountered while loading.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index of each commit within `commit_data`, by OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of graph data returned to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// Live state for a single git repository, layered over its latest
/// [`RepositorySnapshot`] (accessible via `Deref`).
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    /// Buffer used to edit the commit message, once opened.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue feeding this repository's git job runner.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Jobs currently running, for progress display.
    active_jobs: HashMap<JobId, JobInfo>,
    /// Pending git operations tracked per path (see the `pending_op` module).
    pending_ops: SumTree<PendingOps>,
    /// Next job id to allocate.
    job_id: JobId,
    /// Askpass delegates registered for in-flight operations, by askpass id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Shared task resolving the repository's backend state
    /// (local git access or remote proxy).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    /// Git-graph commit lists, keyed by (source, ordering).
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    /// Cache of full commit data for the git graph.
    commit_data: HashMap<Oid, CommitDataState>,
}
350
/// Exposes the latest snapshot's fields directly on `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
358
/// Handles needed to operate on a repository on the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    /// Backend used to execute git operations for this repository.
    pub backend: Arc<dyn GitRepository>,
    /// Shell environment resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
365
impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`, first resolving the working
    /// directory's shell environment so the same `git` binary the user's shell
    /// would run can be located via its `PATH`.
    ///
    /// Falls back to an empty environment (with a logged error) if environment
    /// resolution fails, and to a plain `which("git")` lookup if `PATH` is
    /// unavailable.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        // Opening the repository can touch the filesystem, so do it off the
        // main thread.
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found via the repository's own PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
408
/// Client handle used to proxy repository operations to a remote host.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// How a repository is accessed: directly on disk, or via a remote host.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events emitted while loading the git graph for a repository.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of loaded commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
427
/// Events emitted by a [`Repository`] when parts of its snapshot change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// A git-graph loading event for the given (source, ordering) view.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the [`GitStore`] itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// Writing the git index failed; carries the underlying error.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
456
/// A unit of git work, run with access to the repository's backend state.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): `key` presumably lets the job queue coalesce duplicate
    // work of the same category — confirm against the job runner.
    key: Option<GitJobKey>,
}

/// Categories of jobs that carry a [`GitJob::key`].
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
    /// Creates a `GitStore` backed by local git repositories discovered in
    /// `worktree_store`. Repository ids are allocated from a counter
    /// starting at 1.
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }
490
    /// Creates a `GitStore` that proxies all git operations to the upstream
    /// project identified by `project_id`, over `upstream_client`.
    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: u64,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                downstream: None,
            },
            cx,
        )
    }
509
510 fn new(
511 worktree_store: Entity<WorktreeStore>,
512 buffer_store: Entity<BufferStore>,
513 state: GitStoreState,
514 cx: &mut Context<Self>,
515 ) -> Self {
516 let mut _subscriptions = vec![
517 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
518 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
519 ];
520
521 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
522 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
523 }
524
525 GitStore {
526 state,
527 buffer_store,
528 worktree_store,
529 repositories: HashMap::default(),
530 worktree_ids: HashMap::default(),
531 active_repo_id: None,
532 _subscriptions,
533 loading_diffs: HashMap::default(),
534 shared_diffs: HashMap::default(),
535 diffs: HashMap::default(),
536 }
537 }
538
    /// Registers every git-related RPC handler on the given proto client.
    /// Called once per client that can host or join a project.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
586
    /// Whether this store operates on local repositories (as opposed to
    /// proxying to a remote host's store).
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
590 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
591 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
592 let id = repo.read(cx).id;
593 if self.active_repo_id != Some(id) {
594 self.active_repo_id = Some(id);
595 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
596 }
597 }
598 }
599
    /// Begins sharing this store's repositories with a downstream client.
    ///
    /// In remote mode the current snapshots are sent immediately and the
    /// client is recorded for future updates. In local mode a background
    /// task is spawned that drains queued [`DownstreamUpdate`]s, diffing
    /// each new snapshot against the last one sent so only changes go over
    /// the wire.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send every repository's full state up front.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the queue with the current snapshot of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        // A previously sent repository: send only
                                        // the delta against the last sent snapshot.
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // First time seeing this repository:
                                            // send its full state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The update loop ended (queue closed or send failed):
                        // drop the downstream connection.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
680
681 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
682 match &mut self.state {
683 GitStoreState::Local {
684 downstream: downstream_client,
685 ..
686 } => {
687 downstream_client.take();
688 }
689 GitStoreState::Remote {
690 downstream: downstream_client,
691 ..
692 } => {
693 downstream_client.take();
694 }
695 }
696 self.shared_diffs.clear();
697 }
698
    /// Discards diffs previously shared with the given peer (e.g. after the
    /// peer disconnects).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
702
703 pub fn active_repository(&self) -> Option<Entity<Repository>> {
704 self.active_repo_id
705 .as_ref()
706 .map(|id| self.repositories[id].clone())
707 }
708
    /// Returns the unstaged diff (buffer vs. index text) for `buffer`,
    /// opening it if necessary. Concurrent calls for the same buffer share a
    /// single loading task; if a diff is already open, any in-progress
    /// recalculation is awaited before it is returned.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: reuse an existing, still-alive diff.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads via a shared task keyed by
        // (buffer, diff kind).
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
763
    /// Returns a diff of `buffer` against the blob at `oid` (or an empty
    /// base when `oid` is `None`), opening and caching it if necessary. The
    /// resulting diff uses the buffer's unstaged diff as its secondary diff.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: reuse an existing, still-alive diff for this OID.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Join an in-flight load for the same (buffer, oid), if any.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text for the diff: the blob at `oid`, or
                    // no base at all when `oid` is `None`.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Wire the unstaged diff in as the secondary diff so
                    // staged/unstaged state renders within this diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Record the new diff (and its base text) in the buffer's
                    // git state, and stop tracking the load as in-flight.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
864
    /// Returns the uncommitted diff (buffer vs. committed text) for
    /// `buffer`, opening it if necessary. Concurrent calls for the same
    /// buffer share a single loading task; if a diff is already open, any
    /// in-progress recalculation is awaited before it is returned.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: reuse an existing, still-alive diff.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads via a shared task keyed by
        // (buffer, diff kind).
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
917
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`:
    /// creates the `BufferDiff` entity, records the loaded base texts in the
    /// buffer's git state, wires up the secondary (unstaged) diff for
    /// uncommitted diffs, and waits for the first recalculation to finish.
    ///
    /// On error, the in-flight load entry is removed so a later call can
    /// retry.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Loading the base text failed: clear the in-flight entry and
                // propagate the error.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off the first recalculation against the new bases and
                // hand back a future that resolves once it completes.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
992
993 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
994 let diff_state = self.diffs.get(&buffer_id)?;
995 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
996 }
997
998 pub fn get_uncommitted_diff(
999 &self,
1000 buffer_id: BufferId,
1001 cx: &App,
1002 ) -> Option<Entity<BufferDiff>> {
1003 let diff_state = self.diffs.get(&buffer_id)?;
1004 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1005 }
1006
1007 pub fn get_diff_since_oid(
1008 &self,
1009 buffer_id: BufferId,
1010 oid: Option<git::Oid>,
1011 cx: &App,
1012 ) -> Option<Entity<BufferDiff>> {
1013 let diff_state = self.diffs.get(&buffer_id)?;
1014 diff_state.read(cx).oid_diff(oid)
1015 }
1016
    /// Returns the conflict set for `buffer`, creating one if needed and
    /// kicking off a reparse of the buffer's conflict markers either way.
    /// A new conflict set starts out marked unmerged when the buffer's
    /// repository reports a conflict for its path.
    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: reuse an existing, still-alive conflict set, refreshing
        // its parsed markers from the current buffer contents.
        if let Some(git_state) = self.diffs.get(&buffer_id)
            && let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            let conflict_set = conflict_set;
            let buffer_snapshot = buffer.read(cx).text_snapshot();

            git_state.update(cx, |state, cx| {
                let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
            });

            return conflict_set;
        }

        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        // Re-emit conflict-set changes as a store-level event.
        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }
1065
1066 pub fn project_path_git_status(
1067 &self,
1068 project_path: &ProjectPath,
1069 cx: &App,
1070 ) -> Option<FileStatus> {
1071 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1072 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1073 }
1074
    /// Captures a checkpoint of every repository tracked by this store.
    ///
    /// Collects each repository's work-directory path alongside its checkpoint
    /// future, then awaits them all in the background and zips paths with
    /// results into a [`GitStoreCheckpoint`]. Fails if any individual
    /// repository checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `map(|checkpoint| checkpoint?)` collapses the outer layer
                // (e.g. a cancelled job channel) into the inner `Result`.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Paths and checkpoints were pushed in the same iteration order,
                // so zipping pairs each path with its own checkpoint.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1095
1096 pub fn restore_checkpoint(
1097 &self,
1098 checkpoint: GitStoreCheckpoint,
1099 cx: &mut App,
1100 ) -> Task<Result<()>> {
1101 let repositories_by_work_dir_abs_path = self
1102 .repositories
1103 .values()
1104 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1105 .collect::<HashMap<_, _>>();
1106
1107 let mut tasks = Vec::new();
1108 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1109 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1110 let restore = repository.update(cx, |repository, _| {
1111 repository.restore_checkpoint(checkpoint)
1112 });
1113 tasks.push(async move { restore.await? });
1114 }
1115 }
1116 cx.background_spawn(async move {
1117 future::try_join_all(tasks).await?;
1118 Ok(())
1119 })
1120 }
1121
1122 /// Compares two checkpoints, returning true if they are equal.
1123 pub fn compare_checkpoints(
1124 &self,
1125 left: GitStoreCheckpoint,
1126 mut right: GitStoreCheckpoint,
1127 cx: &mut App,
1128 ) -> Task<Result<bool>> {
1129 let repositories_by_work_dir_abs_path = self
1130 .repositories
1131 .values()
1132 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1133 .collect::<HashMap<_, _>>();
1134
1135 let mut tasks = Vec::new();
1136 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1137 if let Some(right_checkpoint) = right
1138 .checkpoints_by_work_dir_abs_path
1139 .remove(&work_dir_abs_path)
1140 {
1141 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1142 {
1143 let compare = repository.update(cx, |repository, _| {
1144 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1145 });
1146
1147 tasks.push(async move { compare.await? });
1148 }
1149 } else {
1150 return Task::ready(Ok(false));
1151 }
1152 }
1153 cx.background_spawn(async move {
1154 Ok(future::try_join_all(tasks)
1155 .await?
1156 .into_iter()
1157 .all(|result| result))
1158 })
1159 }
1160
    /// Blames a buffer.
    ///
    /// When `version` is `None`, the buffer's current text and version are
    /// used. For local repositories the blame runs on the git backend; for
    /// remote projects a `proto::BlameBuffer` request is sent upstream.
    /// Fails immediately if the buffer isn't part of any known git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the text (at the requested version, if any) before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle across the await; the update below fails
        // gracefully if the repository entity has been dropped meanwhile.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1207
1208 pub fn file_history(
1209 &self,
1210 repo: &Entity<Repository>,
1211 path: RepoPath,
1212 cx: &mut App,
1213 ) -> Task<Result<git::repository::FileHistory>> {
1214 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1215
1216 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1217 }
1218
1219 pub fn file_history_paginated(
1220 &self,
1221 repo: &Entity<Repository>,
1222 path: RepoPath,
1223 skip: usize,
1224 limit: Option<usize>,
1225 cx: &mut App,
1226 ) -> Task<Result<git::repository::FileHistory>> {
1227 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1228
1229 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1230 }
1231
    /// Builds a permalink URL for `selection` in `buffer`.
    ///
    /// Resolves the buffer's repository, its remote URL (the current branch's
    /// upstream remote, defaulting to "origin"), and the current HEAD SHA,
    /// then asks the matching git hosting provider to construct the permalink.
    /// For remote projects the request is forwarded upstream. As a fallback,
    /// Rust files outside any repository get a permalink derived from Cargo
    /// registry metadata.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        // Shadowing: `remote` becomes the parsed remote
                        // (owner/repo), replacing the remote-name string.
                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1316
1317 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1318 match &self.state {
1319 GitStoreState::Local {
1320 downstream: downstream_client,
1321 ..
1322 } => downstream_client
1323 .as_ref()
1324 .map(|state| (state.client.clone(), state.project_id)),
1325 GitStoreState::Remote {
1326 downstream: downstream_client,
1327 ..
1328 } => downstream_client.clone(),
1329 }
1330 }
1331
1332 fn upstream_client(&self) -> Option<AnyProtoClient> {
1333 match &self.state {
1334 GitStoreState::Local { .. } => None,
1335 GitStoreState::Remote {
1336 upstream_client, ..
1337 } => Some(upstream_client.clone()),
1338 }
1339 }
1340
    /// Keeps the repository list in sync with worktree-store events.
    ///
    /// Only meaningful for local stores; remote stores return immediately.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the updated entries by owning repository, then
                    // notify each repository of its changed paths, forwarding
                    // to downstream collaborators via `updates_tx` if shared.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Non-visible worktrees never contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Unlink the removed worktree from every repository; collect
                // the repositories that are now referenced by no worktree.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Reacts to a repository's update event.
    ///
    /// For every tracked buffer diff belonging to the updated repository,
    /// syncs the buffer's conflict-set flag with the repository's unmerged
    /// status, reparsing conflict markers whenever that flag changed. Finally
    /// re-emits the event as [`GitStoreEvent::RepositoryUpdated`].
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only touch buffers that resolve to the repository that changed.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Reparse markers only when the status actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                // A dropped conflict set is not an error worth surfacing here.
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1486
    /// Forwards a repository's `JobsUpdated` notification as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1490
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
    ///
    /// An update matching an existing repository (by old or new work-directory
    /// path) either re-scans it at its new location or unlinks it from this
    /// worktree; updates with no matching repository create a new one.
    /// Repositories left with no associated worktree are removed at the end.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match on either the old or the new work directory so that moved
            // repositories are recognized as the same repository.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still exists (possibly at a new path): link it
                    // to this worktree, adopt the new work directory, re-scan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // No new work directory: unlink this worktree and mark the
                    // repository for removal once no worktree references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // New repository: resolve the original (main) repo path, which
                // differs from the work directory for linked git worktrees.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Only grant the git backend trust when the containing
                // worktree is trusted; defaults to untrusted.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1603
1604 fn on_trusted_worktrees_event(
1605 &mut self,
1606 _: Entity<TrustedWorktreesStore>,
1607 event: &TrustedWorktreesEvent,
1608 cx: &mut Context<Self>,
1609 ) {
1610 if !matches!(self.state, GitStoreState::Local { .. }) {
1611 return;
1612 }
1613
1614 let (is_trusted, event_paths) = match event {
1615 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1616 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1617 };
1618
1619 for (repo_id, worktree_ids) in &self.worktree_ids {
1620 if worktree_ids
1621 .iter()
1622 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1623 {
1624 if let Some(repo) = self.repositories.get(repo_id) {
1625 let repository_state = repo.read(cx).repository_state.clone();
1626 cx.background_spawn(async move {
1627 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1628 state.backend.set_trusted(is_trusted);
1629 }
1630 })
1631 .detach();
1632 }
1633 }
1634 }
1635 }
1636
    /// Keeps per-buffer git state in sync with buffer-store lifecycle events.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Propagate language changes on the buffer to its diff state.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // Drop the diff we were sharing with the peer that closed it.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Buffer is gone entirely: drop local and shared diff state.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Reload the committed text for the new path and rebase
                        // the diff onto it; errors are logged, not propagated.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1709
    /// Schedules diff recalculation and conflict-marker reparsing for `buffers`.
    ///
    /// Buffers with no tracked diff state are skipped. The returned future
    /// resolves once all scheduled recalculations and reparses have completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` returns an `Option`; `extend`
                    // handles the `None` case.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                // Also await the conflict-marker reparse, discarding its value.
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1735
    /// Responds to hunks being staged or unstaged in a buffer diff by writing
    /// the new index text to the repository.
    ///
    /// On a write error, the diff's pending hunks are cleared and a
    /// [`GitStoreEvent::IndexWriteError`] is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Monotonic count of staging operations on this buffer,
                // forwarded to the index-write job.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            // `None` index text clears the entry rather than
                            // writing an empty string.
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // `Ok(Err(_))` means the job ran but the index write
                        // failed: clear the pending hunks and surface the error.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1775
1776 fn local_worktree_git_repos_changed(
1777 &mut self,
1778 worktree: Entity<Worktree>,
1779 changed_repos: &UpdatedGitRepositoriesSet,
1780 cx: &mut Context<Self>,
1781 ) {
1782 log::debug!("local worktree repos changed");
1783 debug_assert!(worktree.read(cx).is_local());
1784
1785 for repository in self.repositories.values() {
1786 repository.update(cx, |repository, cx| {
1787 let repo_abs_path = &repository.work_directory_abs_path;
1788 if changed_repos.iter().any(|update| {
1789 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1790 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1791 }) {
1792 repository.reload_buffer_diff_bases(cx);
1793 }
1794 });
1795 }
1796 }
1797
    /// All repositories currently tracked by this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1801
1802 /// Returns the original (main) repository working directory for the given worktree.
1803 /// For normal checkouts this equals the worktree's own path; for linked
1804 /// worktrees it points back to the original repo.
1805 pub fn original_repo_path_for_worktree(
1806 &self,
1807 worktree_id: WorktreeId,
1808 cx: &App,
1809 ) -> Option<Arc<Path>> {
1810 self.active_repo_id
1811 .iter()
1812 .chain(self.worktree_ids.keys())
1813 .find(|repo_id| {
1814 self.worktree_ids
1815 .get(repo_id)
1816 .is_some_and(|ids| ids.contains(&worktree_id))
1817 })
1818 .and_then(|repo_id| self.repositories.get(repo_id))
1819 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1820 }
1821
1822 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1823 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1824 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1825 Some(status.status)
1826 }
1827
1828 pub fn repository_and_path_for_buffer_id(
1829 &self,
1830 buffer_id: BufferId,
1831 cx: &App,
1832 ) -> Option<(Entity<Repository>, RepoPath)> {
1833 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1834 let project_path = buffer.read(cx).project_path(cx)?;
1835 self.repository_and_path_for_project_path(&project_path, cx)
1836 }
1837
1838 pub fn repository_and_path_for_project_path(
1839 &self,
1840 path: &ProjectPath,
1841 cx: &App,
1842 ) -> Option<(Entity<Repository>, RepoPath)> {
1843 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1844 self.repositories
1845 .values()
1846 .filter_map(|repo| {
1847 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1848 Some((repo.clone(), repo_path))
1849 })
1850 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1851 }
1852
1853 pub fn git_init(
1854 &self,
1855 path: Arc<Path>,
1856 fallback_branch_name: String,
1857 cx: &App,
1858 ) -> Task<Result<()>> {
1859 match &self.state {
1860 GitStoreState::Local { fs, .. } => {
1861 let fs = fs.clone();
1862 cx.background_executor()
1863 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1864 }
1865 GitStoreState::Remote {
1866 upstream_client,
1867 upstream_project_id: project_id,
1868 ..
1869 } => {
1870 let client = upstream_client.clone();
1871 let project_id = *project_id;
1872 cx.background_executor().spawn(async move {
1873 client
1874 .request(proto::GitInit {
1875 project_id: project_id,
1876 abs_path: path.to_string_lossy().into_owned(),
1877 fallback_branch_name,
1878 })
1879 .await?;
1880 Ok(())
1881 })
1882 }
1883 }
1884 }
1885
1886 pub fn git_clone(
1887 &self,
1888 repo: String,
1889 path: impl Into<Arc<std::path::Path>>,
1890 cx: &App,
1891 ) -> Task<Result<()>> {
1892 let path = path.into();
1893 match &self.state {
1894 GitStoreState::Local { fs, .. } => {
1895 let fs = fs.clone();
1896 cx.background_executor()
1897 .spawn(async move { fs.git_clone(&repo, &path).await })
1898 }
1899 GitStoreState::Remote {
1900 upstream_client,
1901 upstream_project_id,
1902 ..
1903 } => {
1904 if upstream_client.is_via_collab() {
1905 return Task::ready(Err(anyhow!(
1906 "Git Clone isn't supported for project guests"
1907 )));
1908 }
1909 let request = upstream_client.request(proto::GitClone {
1910 project_id: *upstream_project_id,
1911 abs_path: path.to_string_lossy().into_owned(),
1912 remote_repo: repo,
1913 });
1914
1915 cx.background_spawn(async move {
1916 let result = request.await?;
1917
1918 match result.success {
1919 true => Ok(()),
1920 false => Err(anyhow!("Git Clone failed")),
1921 }
1922 })
1923 }
1924 }
1925 }
1926
    /// Handles an `UpdateRepository` RPC from upstream.
    ///
    /// Creates the remote repository entity the first time its id is seen,
    /// applies the update to it, makes it the active repository if none is
    /// set, and re-forwards the update to any downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // The subscription is captured here and registered after the
            // `entry` borrow ends, since registering needs `this` mutably.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward to downstream collaborators with our project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1982
1983 async fn handle_remove_repository(
1984 this: Entity<Self>,
1985 envelope: TypedEnvelope<proto::RemoveRepository>,
1986 mut cx: AsyncApp,
1987 ) -> Result<()> {
1988 this.update(&mut cx, |this, cx| {
1989 let mut update = envelope.payload;
1990 let id = RepositoryId::from_proto(update.id);
1991 this.repositories.remove(&id);
1992 if let Some((client, project_id)) = this.downstream_client() {
1993 update.project_id = project_id.to_proto();
1994 client.send(update).log_err();
1995 }
1996 if this.active_repo_id == Some(id) {
1997 this.active_repo_id = None;
1998 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1999 }
2000 cx.emit(GitStoreEvent::RepositoryRemoved(id));
2001 });
2002 Ok(())
2003 }
2004
    /// Handles a `GitInit` RPC by initializing a repository at the requested
    /// path on behalf of the remote peer.
    async fn handle_git_init(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitInit>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let name = envelope.payload.fallback_branch_name;
        cx.update(|cx| this.read(cx).git_init(path, name, cx))
            .await?;

        Ok(proto::Ack {})
    }
2017
    /// Handles a `GitClone` RPC by cloning the requested repository on behalf
    /// of the remote peer.
    ///
    /// Only a success flag is reported back; the underlying error detail, if
    /// any, is discarded here.
    async fn handle_git_clone(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitClone>,
        cx: AsyncApp,
    ) -> Result<proto::GitCloneResponse> {
        let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
        let repo_name = envelope.payload.remote_repo;
        let result = cx
            .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
            .await;

        Ok(proto::GitCloneResponse {
            success: result.is_ok(),
        })
    }
2033
    /// Handles a `Fetch` RPC by fetching on the addressed repository.
    ///
    /// Credential prompts raised by git are proxied back to the requesting
    /// peer through the askpass delegate identified by `askpass_id`.
    async fn handle_fetch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Fetch>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `??`: the first `?` unwraps the job-channel result, the second the
        // fetch result itself.
        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.fetch(fetch_options, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2063
    /// Handles a `Push` RPC by pushing the given branch on the addressed
    /// repository.
    ///
    /// Credential prompts raised by git are proxied back to the requesting
    /// peer through the askpass delegate identified by `askpass_id`.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `as_ref().map(|_| ...)` preserves the absent case; `options()` then
        // decodes the protobuf enum value when the field is present.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        // `??`: the first `?` unwraps the job-channel result, the second the
        // push result itself.
        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2111
    /// Handles a `Pull` RPC: pulls (optionally with rebase) from the given
    /// remote, proxying credential prompts to the requesting peer, and
    /// returns the command's stdout/stderr.
    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `branch_name` is optional: when absent, the repository decides what
        // to pull (e.g. the current branch's upstream).
        let branch_name = envelope.payload.branch_name.map(|name| name.into());
        let remote_name = envelope.payload.remote_name.into();
        let rebase = envelope.payload.rebase;

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }
2143
    /// Handles a `Stage` RPC: stages the given repo-relative paths in the
    /// requested repository's index.
    async fn handle_stage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Fail the whole request if any path is not a valid repo path.
        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(|path| RepoPath::new(&path))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stage_entries(entries, cx)
            })
            .await?;
        Ok(proto::Ack {})
    }
2166
    /// Handles an `Unstage` RPC: removes the given repo-relative paths from
    /// the requested repository's index.
    async fn handle_unstage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Unstage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Fail the whole request if any path is not a valid repo path.
        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(|path| RepoPath::new(&path))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.unstage_entries(entries, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2190
    /// Handles a `Stash` RPC: stashes the changes at the given
    /// repo-relative paths.
    async fn handle_stash(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stash>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Fail the whole request if any path is not a valid repo path.
        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(|path| RepoPath::new(&path))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_entries(entries, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2214
    /// Handles a `StashPop` RPC: pops the stash entry at `stash_index`
    /// (or the repository's default entry when `None`).
    async fn handle_stash_pop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashPop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_pop(stash_index, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2232
    /// Handles a `StashApply` RPC: applies the stash entry at `stash_index`
    /// (or the repository's default entry when `None`) without dropping it.
    async fn handle_stash_apply(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashApply>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_apply(stash_index, cx)
            })
            .await?;

        Ok(proto::Ack {})
    }
2250
    /// Handles a `StashDrop` RPC: drops the stash entry at `stash_index`
    /// (or the repository's default entry when `None`).
    async fn handle_stash_drop(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::StashDrop>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let stash_index = envelope.payload.stash_index.map(|i| i as usize);

        // NOTE: double `?` — stash_drop's task resolves to a nested Result,
        // unlike stash_pop/stash_apply above.
        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.stash_drop(stash_index, cx)
            })
            .await??;

        Ok(proto::Ack {})
    }
2268
    /// Handles a `SetIndexText` RPC: overwrites the index (staged) content
    /// for a single path in the requested repository. A `None` text
    /// presumably removes the entry — TODO confirm against
    /// `spawn_set_index_text_job`.
    async fn handle_set_index_text(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SetIndexText>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let repo_path = RepoPath::from_proto(&envelope.payload.path)?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.spawn_set_index_text_job(
                    repo_path,
                    envelope.payload.text,
                    None,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2290
    /// Handles a `RunGitHook` RPC: runs the requested git hook in the
    /// repository, rejecting unknown hook identifiers.
    async fn handle_run_hook(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RunGitHook>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.run_hook(hook, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2306
    /// Handles a `Commit` RPC: creates a commit with the given message and
    /// options, proxying any credential/signing prompts to the requesting
    /// peer via an askpass delegate.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    // The author override is only applied when BOTH name and
                    // email were supplied (`zip` yields None otherwise).
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                        allow_empty: options.allow_empty,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2346
2347 async fn handle_get_remotes(
2348 this: Entity<Self>,
2349 envelope: TypedEnvelope<proto::GetRemotes>,
2350 mut cx: AsyncApp,
2351 ) -> Result<proto::GetRemotesResponse> {
2352 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2353 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2354
2355 let branch_name = envelope.payload.branch_name;
2356 let is_push = envelope.payload.is_push;
2357
2358 let remotes = repository_handle
2359 .update(&mut cx, |repository_handle, _| {
2360 repository_handle.get_remotes(branch_name, is_push)
2361 })
2362 .await??;
2363
2364 Ok(proto::GetRemotesResponse {
2365 remotes: remotes
2366 .into_iter()
2367 .map(|remotes| proto::get_remotes_response::Remote {
2368 name: remotes.name.to_string(),
2369 })
2370 .collect::<Vec<_>>(),
2371 })
2372 }
2373
    /// Handles a `GitGetWorktrees` RPC: lists the repository's linked git
    /// worktrees in proto form.
    async fn handle_get_worktrees(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitGetWorktrees>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitWorktreesResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let worktrees = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.worktrees()
            })
            .await??;

        Ok(proto::GitWorktreesResponse {
            worktrees: worktrees
                .into_iter()
                .map(|worktree| worktree_to_proto(&worktree))
                .collect::<Vec<_>>(),
        })
    }
2395
    /// Handles a `GitCreateWorktree` RPC: creates a new linked git worktree
    /// named `name` in `directory`, optionally checked out at `commit`.
    async fn handle_create_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let directory = PathBuf::from(envelope.payload.directory);
        let name = envelope.payload.name;
        let commit = envelope.payload.commit;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_worktree(name, directory, commit)
            })
            .await??;

        Ok(proto::Ack {})
    }
2415
    /// Handles a `GitRemoveWorktree` RPC: removes the linked worktree at
    /// `path`, forcing removal when `force` is set.
    async fn handle_remove_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRemoveWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let path = PathBuf::from(envelope.payload.path);
        let force = envelope.payload.force;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.remove_worktree(path, force)
            })
            .await??;

        Ok(proto::Ack {})
    }
2434
    /// Handles a `GitRenameWorktree` RPC: moves the linked worktree at
    /// `old_path` to `new_path`.
    async fn handle_rename_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRenameWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let old_path = PathBuf::from(envelope.payload.old_path);
        let new_path = PathBuf::from(envelope.payload.new_path);

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.rename_worktree(old_path, new_path)
            })
            .await??;

        Ok(proto::Ack {})
    }
2453
    /// Handles a `GitGetBranches` RPC: lists the repository's branches in
    /// proto form.
    async fn handle_get_branches(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitGetBranches>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitBranchesResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branches = repository_handle
            .update(&mut cx, |repository_handle, _| repository_handle.branches())
            .await??;

        Ok(proto::GitBranchesResponse {
            branches: branches
                .into_iter()
                .map(|branch| branch_to_proto(&branch))
                .collect::<Vec<_>>(),
        })
    }
    /// Handles a `GetDefaultBranch` RPC: reports the repository's default
    /// branch, if one can be determined.
    async fn handle_get_default_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDefaultBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetDefaultBranchResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch = repository_handle
            .update(&mut cx, |repository_handle, _| {
                // `false` disables some lookup behavior — TODO confirm the
                // parameter's meaning against `default_branch`'s definition.
                repository_handle.default_branch(false)
            })
            .await??
            .map(Into::into);

        Ok(proto::GetDefaultBranchResponse { branch })
    }
    /// Handles a `GitCreateBranch` RPC: creates a branch named
    /// `branch_name` (with no explicit start point — the `None` argument).
    async fn handle_create_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_branch(branch_name, None)
            })
            .await??;

        Ok(proto::Ack {})
    }
2507
    /// Handles a `GitChangeBranch` RPC: checks out the branch named
    /// `branch_name`.
    async fn handle_change_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitChangeBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.change_branch(branch_name)
            })
            .await??;

        Ok(proto::Ack {})
    }
2525
    /// Handles a `GitRenameBranch` RPC: renames `branch` to `new_name`.
    async fn handle_rename_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRenameBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let branch = envelope.payload.branch;
        let new_name = envelope.payload.new_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.rename_branch(branch, new_name)
            })
            .await??;

        Ok(proto::Ack {})
    }
2544
    /// Handles a `GitCreateRemote` RPC: adds a remote named `remote_name`
    /// pointing at `remote_url`.
    async fn handle_create_remote(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateRemote>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let remote_name = envelope.payload.remote_name;
        let remote_url = envelope.payload.remote_url;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_remote(remote_name, remote_url)
            })
            .await??;

        Ok(proto::Ack {})
    }
2563
    /// Handles a `GitDeleteBranch` RPC: deletes `branch_name`, treating it
    /// as a remote-tracking branch when `is_remote` is set.
    async fn handle_delete_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitDeleteBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let is_remote = envelope.payload.is_remote;
        let branch_name = envelope.payload.branch_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.delete_branch(is_remote, branch_name)
            })
            .await??;

        Ok(proto::Ack {})
    }
2582
    /// Handles a `GitRemoveRemote` RPC: removes the remote named
    /// `remote_name`.
    async fn handle_remove_remote(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitRemoveRemote>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let remote_name = envelope.payload.remote_name;

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.remove_remote(remote_name)
            })
            .await??;

        Ok(proto::Ack {})
    }
2600
    /// Handles a `GitShow` RPC: looks up details (sha, message, author,
    /// timestamp) for the requested commit.
    async fn handle_show(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitShow>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitCommitDetails> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.show(envelope.payload.commit)
            })
            .await??;
        Ok(proto::GitCommitDetails {
            sha: commit.sha.into(),
            message: commit.message.into(),
            commit_timestamp: commit.commit_timestamp,
            author_email: commit.author_email.into(),
            author_name: commit.author_name.into(),
        })
    }
2622
    /// Handles a `LoadCommitDiff` RPC: loads the per-file old/new contents
    /// for the requested commit's diff.
    async fn handle_load_commit_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::LoadCommitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::LoadCommitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit_diff = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.load_commit_diff(envelope.payload.commit)
            })
            .await??;
        Ok(proto::LoadCommitDiffResponse {
            files: commit_diff
                .files
                .into_iter()
                .map(|file| proto::CommitFile {
                    path: file.path.to_proto(),
                    old_text: file.old_text,
                    new_text: file.new_text,
                    is_binary: file.is_binary,
                })
                .collect(),
        })
    }
2649
    /// Handles a `GitFileHistory` RPC: returns a paginated commit history
    /// for a single file (`skip` entries offset, optional `limit`).
    async fn handle_file_history(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitFileHistory>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitFileHistoryResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let path = RepoPath::from_proto(&envelope.payload.path)?;
        let skip = envelope.payload.skip as usize;
        let limit = envelope.payload.limit.map(|l| l as usize);

        let file_history = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.file_history_paginated(path, skip, limit)
            })
            .await??;

        Ok(proto::GitFileHistoryResponse {
            entries: file_history
                .entries
                .into_iter()
                .map(|entry| proto::FileHistoryEntry {
                    sha: entry.sha.to_string(),
                    subject: entry.subject.to_string(),
                    message: entry.message.to_string(),
                    commit_timestamp: entry.commit_timestamp,
                    author_name: entry.author_name.to_string(),
                    author_email: entry.author_email.to_string(),
                })
                .collect(),
            // The path echoed back may differ from the request (e.g. after
            // rename following) — it comes from the history result itself.
            path: file_history.path.to_proto(),
        })
    }
2683
    /// Handles a `GitReset` RPC: resets the repository to the given commit.
    /// Only soft and mixed resets are exposed over the protocol.
    async fn handle_reset(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitReset>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let mode = match envelope.payload.mode() {
            git_reset::ResetMode::Soft => ResetMode::Soft,
            git_reset::ResetMode::Mixed => ResetMode::Mixed,
        };

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.reset(envelope.payload.commit, mode, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2704
    /// Handles a `GitCheckoutFiles` RPC: restores the given paths to their
    /// state at the given commit (like `git checkout <commit> -- <paths>`).
    async fn handle_checkout_files(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCheckoutFiles>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        // Fail the whole request if any path is not a valid repo path.
        let paths = envelope
            .payload
            .paths
            .iter()
            .map(|s| RepoPath::from_proto(s))
            .collect::<Result<Vec<_>>>()?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
            })
            .await?;
        Ok(proto::Ack {})
    }
2726
    /// Handles an `OpenCommitMessageBuffer` RPC: opens (or reuses) the
    /// repository's commit-message buffer, shares it with the requesting
    /// peer, and returns its buffer id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Replicate the buffer to the original requester (falling back to the
        // direct sender when the message was not forwarded).
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2757
    /// Handles an `AskPassRequest` RPC: forwards a credential prompt to the
    /// locally registered askpass delegate and returns the user's answer.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        // Temporarily take the delegate out of the shared map so we can
        // await on it without holding the lock.
        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so later prompts for the same operation can
        // reuse it.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2786
    /// Handles a `CheckForPushedCommits` RPC: reports which branches the
    /// current work has already been pushed to.
    async fn handle_check_for_pushed_commits(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::CheckForPushedCommits>,
        mut cx: AsyncApp,
    ) -> Result<proto::CheckForPushedCommitsResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branches = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.check_for_pushed_commits()
            })
            .await??;
        Ok(proto::CheckForPushedCommitsResponse {
            pushed_to: branches
                .into_iter()
                .map(|commit| commit.to_string())
                .collect(),
        })
    }
2807
2808 async fn handle_git_diff(
2809 this: Entity<Self>,
2810 envelope: TypedEnvelope<proto::GitDiff>,
2811 mut cx: AsyncApp,
2812 ) -> Result<proto::GitDiffResponse> {
2813 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2814 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2815 let diff_type = match envelope.payload.diff_type() {
2816 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2817 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2818 proto::git_diff::DiffType::MergeBase => {
2819 let base_ref = envelope
2820 .payload
2821 .merge_base_ref
2822 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2823 DiffType::MergeBase {
2824 base_ref: base_ref.into(),
2825 }
2826 }
2827 };
2828
2829 let mut diff = repository_handle
2830 .update(&mut cx, |repository_handle, cx| {
2831 repository_handle.diff(diff_type, cx)
2832 })
2833 .await??;
2834 const ONE_MB: usize = 1_000_000;
2835 if diff.len() > ONE_MB {
2836 diff = diff.chars().take(ONE_MB).collect()
2837 }
2838
2839 Ok(proto::GitDiffResponse { diff })
2840 }
2841
    /// Handles a `GetTreeDiff` RPC: diffs two tree-ish refs (either directly
    /// or via their merge base) and returns per-path statuses with the old
    /// blob oid for modified/deleted entries.
    async fn handle_tree_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetTreeDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId(request.payload.repository_id);
        let diff_type = if request.payload.is_merge {
            DiffTreeType::MergeBase {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        } else {
            DiffTreeType::Since {
                base: request.payload.base.into(),
                head: request.payload.head.into(),
            }
        };

        let diff = this
            .update(&mut cx, |this, cx| {
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
            })
            .context("missing repository")?
            .await??;

        Ok(proto::GetTreeDiffResponse {
            entries: diff
                .entries
                .into_iter()
                .map(|(path, status)| proto::TreeDiffStatus {
                    path: path.as_ref().to_proto(),
                    // First match only inspects the variant (no fields moved),
                    // so `status` can be matched again below for the oid.
                    status: match status {
                        TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
                        TreeDiffStatus::Modified { .. } => {
                            proto::tree_diff_status::Status::Modified.into()
                        }
                        TreeDiffStatus::Deleted { .. } => {
                            proto::tree_diff_status::Status::Deleted.into()
                        }
                    },
                    // The pre-change blob oid; added files have no old blob.
                    oid: match status {
                        TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
                            Some(old.to_string())
                        }
                        TreeDiffStatus::Added => None,
                    },
                })
                .collect(),
        })
    }
2893
    /// Handles a `GetBlobContent` RPC: loads the content of the blob with
    /// the given oid from the requested repository.
    async fn handle_get_blob_content(
        this: Entity<Self>,
        request: TypedEnvelope<proto::GetBlobContent>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetBlobContentResponse> {
        let oid = git::Oid::from_str(&request.payload.oid)?;
        let repository_id = RepositoryId(request.payload.repository_id);
        let content = this
            .update(&mut cx, |this, cx| {
                let repository = this.repositories().get(&repository_id)?;
                Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
            })
            .context("missing repository")?
            .await?;
        Ok(proto::GetBlobContentResponse { content })
    }
2910
    /// Handles an `OpenUnstagedDiff` RPC: opens the unstaged diff for a
    /// buffer, records it in the per-peer shared-diff map so updates keep
    /// flowing to that peer, and returns the index (staged) base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff per requesting peer so it stays alive and synced.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2934
    /// Handles an `OpenUncommittedDiff` RPC: opens the uncommitted diff for
    /// a buffer, records it in the per-peer shared-diff map, and encodes the
    /// committed (HEAD) and staged (index) base texts for the peer.
    ///
    /// The response avoids sending the staged text redundantly: when the
    /// index snapshot is the same text entity as the HEAD snapshot, only the
    /// committed text is sent with `Mode::IndexMatchesHead`.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff per requesting peer so it stays alive and synced.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff (if any) tracks the index; its base text is
            // the staged snapshot, present only when the file exists in the
            // index.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                // File exists in HEAD.
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Index and HEAD share the same text entity: don't
                        // send the staged text twice.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // In HEAD but not in the index.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // Not in HEAD; only the staged text (if any) is meaningful.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2995
    /// Handles an `UpdateDiffBases` RPC: feeds new committed/staged base
    /// texts into the local diff state for a buffer. Silently does nothing
    /// if the buffer or its diff state is no longer tracked.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
3014
    /// Handles a `BlameBuffer` RPC: waits for the buffer to reach the
    /// requested version, then computes and serializes its git blame.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Make sure local state has caught up to the version the requester
        // blamed against before computing the blame.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3037
    /// Handles a `GetPermalinkToLine` RPC: builds a hosting-provider
    /// permalink for the selected row range of a buffer.
    async fn handle_get_permalink_to_line(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPermalinkToLine>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetPermalinkToLineResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        // Selection endpoints arrive as u64 row indices; narrow to u32 for
        // the internal range type.
        let selection = {
            let proto_selection = envelope
                .payload
                .selection
                .context("no selection to get permalink for defined")?;
            proto_selection.start as u32..proto_selection.end as u32
        };
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        let permalink = this
            .update(&mut cx, |this, cx| {
                this.get_permalink_to_line(&buffer, selection, cx)
            })
            .await?;
        Ok(proto::GetPermalinkToLineResponse {
            permalink: permalink.to_string(),
        })
    }
3064
3065 fn repository_for_request(
3066 this: &Entity<Self>,
3067 id: RepositoryId,
3068 cx: &mut AsyncApp,
3069 ) -> Result<Entity<Repository>> {
3070 this.read_with(cx, |this, _| {
3071 this.repositories
3072 .get(&id)
3073 .context("missing repository handle")
3074 .cloned()
3075 })
3076 }
3077
3078 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3079 self.repositories
3080 .iter()
3081 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3082 .collect()
3083 }
3084
    /// Maps a batch of updated worktree entries to the repositories they
    /// belong to.
    ///
    /// Returns a background task resolving to, for each affected repository,
    /// the repo-relative paths of the updated entries. A path contained in
    /// nested repositories is assigned only to its innermost one.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Pair each repository with its work-directory absolute path, so
        // entries (absolutized below) can be matched against it.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorting lets each repo's matching entries form a contiguous range.
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // Iterate repos in reverse (descending work-dir path) so that
            // results arrive innermost-repo-first below.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the entry's index so duplicates across
                        // nested repos can be filtered out afterwards.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3164}
3165
impl BufferGitState {
    /// Creates an empty per-buffer git state with no diffs, base texts, or
    /// conflict data yet.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Watch channel broadcasting whether a diff recalculation is in
            // flight; starts out idle.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }

    /// Records the buffer's new language and kicks off a diff recalculation
    /// so diff snapshots are rebuilt with the new language.
    #[ztracing::instrument(skip_all)]
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Re-parses git conflict markers in `buffer` and updates the associated
    /// [`ConflictSet`].
    ///
    /// The returned receiver resolves once the conflict set has been updated.
    /// If there is no live conflict set, or it currently reports no conflict,
    /// no work is scheduled; the sender is dropped, so awaiting the receiver
    /// yields a cancellation error instead.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff against the previous snapshot off the main
                // thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Notify every pending waiter, not just the caller that
                    // spawned this particular task.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }

    /// The buffer-vs-index diff, if it is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// The buffer-vs-HEAD diff, if it is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// The diff against the given commit oid, if it is still alive.
    fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
        self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
    }

    /// Applies new diff base texts received over RPC and recalculates the
    /// affected diffs.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        // Unknown modes (e.g. from a newer peer) are silently ignored.
        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }

    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves when it completes (or the channel closes); otherwise `None`.
    pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
        if *self.recalculating_tx.borrow() {
            let mut rx = self.recalculating_tx.subscribe();
            Some(async move {
                loop {
                    let is_recalculating = rx.recv().await;
                    // Stop on `Some(false)` (done) or `None` (sender gone).
                    if is_recalculating != Some(true) {
                        break;
                    }
                }
            })
        } else {
            None
        }
    }

    /// Stores new head/index base texts (normalizing line endings) and kicks
    /// off a diff recalculation.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: Option<DiffBasesChange>,
        cx: &mut Context<Self>,
    ) {
        match diff_bases_change {
            Some(DiffBasesChange::SetIndex(index)) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetHead(head)) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            Some(DiffBasesChange::SetBoth(text)) => {
                // Index matches HEAD: share one allocation so the
                // `Arc::ptr_eq` check in `recalculate_diffs` detects it.
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::from(text.as_str())
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetEach { index, head }) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            None => {}
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged, uncommitted, and per-oid diffs for `buffer`
    /// against the currently stored base texts.
    ///
    /// The work runs in a spawned task stored in `recalculate_diff_task`,
    /// while `recalculating_tx` broadcasts `true`. The task aborts early if
    /// new hunk staging operations arrive mid-recalculation, leaving a later
    /// recalculation to settle the state.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // `SetBoth` stores one shared allocation in both fields, so pointer
        // equality cheaply identifies the index-matches-HEAD case.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop cached base texts for oid diffs that no longer have any live
        // consumers.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                new_uncommitted_diff = if index_matches_head {
                    // Index and HEAD are identical, so the uncommitted diff
                    // can reuse the unstaged result.
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry, cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Refresh every live commit-oid diff, yielding between each one.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and signal waiters that recalculation is
            // done.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
}
3545
/// Builds an [`AskPassDelegate`] that forwards askpass prompts to the
/// downstream client via an `AskPassRequest` RPC and relays the encrypted
/// answer back through `tx`.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // Without a downstream client there is nobody to ask; the prompt
            // is dropped and the askpass request goes unanswered.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Wipe the plaintext response from memory once it has been
                // handed off.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3575
3576impl RepositoryId {
3577 pub fn to_proto(self) -> u64 {
3578 self.0
3579 }
3580
3581 pub fn from_proto(id: u64) -> Self {
3582 RepositoryId(id)
3583 }
3584}
3585
impl RepositorySnapshot {
    /// Creates a snapshot for a repository that has not been scanned yet
    /// (`scan_id` 0, no statuses, no branch).
    ///
    /// When `original_repo_abs_path` is `None`, the work directory itself is
    /// used as the original repo path (i.e. the main worktree case).
    fn empty(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
    ) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            original_repo_abs_path: original_repo_abs_path
                .unwrap_or_else(|| work_directory_abs_path.clone()),
            work_directory_abs_path,
            branch: None,
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
            linked_worktrees: Arc::from([]),
            path_style,
        }
    }

    /// Builds the first `UpdateRepository` message sent for this repository,
    /// carrying every status entry.
    ///
    /// NOTE: the proto fields built here largely duplicate `build_update`;
    /// keep the two in sync when adding fields.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// Builds an incremental `UpdateRepository` message describing how this
    /// snapshot differs from `old`.
    ///
    /// Status changes are computed with a two-pointer merge over the two
    /// snapshots' `statuses_by_path`, which are iterated in repo-path order.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        // Path only exists in the new snapshot: added.
                        Ordering::Less => {
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        // Path exists in both: emit only if something changed.
                        Ordering::Equal => {
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        // Path only exists in the old snapshot: removed.
                        Ordering::Greater => {
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        // NOTE: keep these fields in sync with `initial_update`.
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    pub fn is_main_worktree(&self) -> bool {
        self.work_directory_abs_path == self.original_repo_abs_path
    }

    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }

    /// The git worktrees linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }

    /// Iterates over all file status entries in this snapshot.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// The aggregate git status summary over all paths.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// The status entry for `path`, if one is recorded.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// The diff statistics for `path`, if its status entry carries any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }

    /// Converts an absolute path into a path relative to this repository's
    /// work directory, or `None` if it lies outside the work directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Converts a repo-relative path back into an absolute path.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }

    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }

    /// Whether `repo_path` was conflicted the last time the merge heads
    /// changed, regardless of its current status.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }

    /// Whether `repo_path` is conflicted now, or was conflicted when the
    /// merge heads last changed.
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change = self
            .merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
3825
3826pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3827 proto::StashEntry {
3828 oid: entry.oid.as_bytes().to_vec(),
3829 message: entry.message.clone(),
3830 branch: entry.branch.clone(),
3831 index: entry.index as u64,
3832 timestamp: entry.timestamp,
3833 }
3834}
3835
3836pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3837 Ok(StashEntry {
3838 oid: Oid::from_bytes(&entry.oid)?,
3839 message: entry.message.clone(),
3840 index: entry.index as usize,
3841 branch: entry.branch.clone(),
3842 timestamp: entry.timestamp,
3843 })
3844}
3845
3846impl MergeDetails {
3847 async fn update(
3848 &mut self,
3849 backend: &Arc<dyn GitRepository>,
3850 current_conflicted_paths: Vec<RepoPath>,
3851 ) -> Result<bool> {
3852 log::debug!("load merge details");
3853 self.message = backend.merge_message().await.map(SharedString::from);
3854 let heads = backend
3855 .revparse_batch(vec![
3856 "MERGE_HEAD".into(),
3857 "CHERRY_PICK_HEAD".into(),
3858 "REBASE_HEAD".into(),
3859 "REVERT_HEAD".into(),
3860 "APPLY_HEAD".into(),
3861 ])
3862 .await
3863 .log_err()
3864 .unwrap_or_default()
3865 .into_iter()
3866 .map(|opt| opt.map(SharedString::from))
3867 .collect::<Vec<_>>();
3868
3869 let mut conflicts_changed = false;
3870
3871 // Record the merge state for newly conflicted paths
3872 for path in ¤t_conflicted_paths {
3873 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3874 conflicts_changed = true;
3875 self.merge_heads_by_conflicted_path
3876 .insert(path.clone(), heads.clone());
3877 }
3878 }
3879
3880 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3881 self.merge_heads_by_conflicted_path
3882 .retain(|path, old_merge_heads| {
3883 let keep = current_conflicted_paths.contains(path)
3884 || (old_merge_heads == &heads
3885 && old_merge_heads.iter().any(|head| head.is_some()));
3886 if !keep {
3887 conflicts_changed = true;
3888 }
3889 keep
3890 });
3891
3892 Ok(conflicts_changed)
3893 }
3894}
3895
3896impl Repository {
3897 pub fn is_trusted(&self) -> bool {
3898 match self.repository_state.peek() {
3899 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3900 _ => false,
3901 }
3902 }
3903
3904 pub fn snapshot(&self) -> RepositorySnapshot {
3905 self.snapshot.clone()
3906 }
3907
3908 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3909 self.pending_ops.iter().cloned()
3910 }
3911
3912 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3913 self.pending_ops.summary().clone()
3914 }
3915
3916 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3917 self.pending_ops
3918 .get(&PathKey(path.as_ref().clone()), ())
3919 .cloned()
3920 }
3921
    /// Builds a `Repository` backed by a git repository on the local
    /// filesystem.
    ///
    /// The backend state is initialized asynchronously; the shared `state`
    /// future stringifies errors so its output stays cloneable. The git
    /// worker is spawned immediately and resolves the state before running
    /// jobs.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        // Wrap the local state in the `RepositoryState` enum so it has the
        // same shape as the remote variant.
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached initial graph data whenever the branch changes
        // after the first scan.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3989
3990 fn remote(
3991 id: RepositoryId,
3992 work_directory_abs_path: Arc<Path>,
3993 original_repo_abs_path: Option<Arc<Path>>,
3994 path_style: PathStyle,
3995 project_id: ProjectId,
3996 client: AnyProtoClient,
3997 git_store: WeakEntity<GitStore>,
3998 cx: &mut Context<Self>,
3999 ) -> Self {
4000 let snapshot = RepositorySnapshot::empty(
4001 id,
4002 work_directory_abs_path,
4003 original_repo_abs_path,
4004 path_style,
4005 );
4006 let repository_state = RemoteRepositoryState { project_id, client };
4007 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
4008 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
4009 Self {
4010 this: cx.weak_entity(),
4011 snapshot,
4012 commit_message_buffer: None,
4013 git_store,
4014 pending_ops: Default::default(),
4015 paths_needing_status_update: Default::default(),
4016 job_sender,
4017 repository_state,
4018 askpass_delegates: Default::default(),
4019 latest_askpass_id: 0,
4020 active_jobs: Default::default(),
4021 job_id: 0,
4022 initial_graph_data: Default::default(),
4023 commit_data: Default::default(),
4024 graph_commit_data_handler: GraphCommitHandlerState::Closed,
4025 }
4026 }
4027
    /// Returns the owning [`GitStore`], if it is still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4031
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, and triggers diff recalculation for buffers whose
    /// bases actually changed.
    ///
    /// Scheduled as a keyed job (`GitJobKey::ReloadBufferDiffBases`) —
    /// presumably so concurrent reload requests coalesce; see
    /// `send_keyed_job`. Only meaningful for local repositories.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Collect, for each open buffer that lives inside this repo,
                // its repo path and its current base texts — but only for
                // diffs that still have live consumers.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Off the main thread: load fresh index/HEAD texts and decide
                // what (if anything) changed for each buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            // Only load the texts that some live diff needs.
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // Compare freshly loaded texts against the cached
                            // ones, producing the narrowest change variant.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            // Forward the new bases to any downstream client
                            // before applying them locally.
                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4191
    /// Enqueues `job` on this repository's git job queue and returns a
    /// oneshot receiver for its result.
    ///
    /// `status` is an optional human-readable message recorded in
    /// `active_jobs` for the duration of the job (see `send_keyed_job`).
    /// The receiver yields `Canceled` if the job is dropped before running.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        // No `GitJobKey`: this job is not subject to keyed-job handling.
        self.send_keyed_job(None, status, job)
    }
4204
    /// Enqueues a git job, optionally tagged with a `GitJobKey`, and returns
    /// a oneshot receiver for the job's result.
    ///
    /// While the job runs, an entry is kept in `active_jobs` (only when
    /// `status` is provided) so observers can display what git is doing and
    /// for how long; the entry is removed when the job finishes.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Monotonically increasing id used as the `active_jobs` key.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    // Build the user future eagerly, then drive it from a
                    // spawned task so we can bracket it with bookkeeping.
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        if let Some(s) = status.clone() {
                            // Record the job as active; `start` lets the UI
                            // show elapsed time.
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Always clear the active-job entry, even on error.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // Receiver may have been dropped; ignore send failure.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4254
4255 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4256 let Some(git_store) = self.git_store.upgrade() else {
4257 return;
4258 };
4259 let entity = cx.entity();
4260 git_store.update(cx, |git_store, cx| {
4261 let Some((&id, _)) = git_store
4262 .repositories
4263 .iter()
4264 .find(|(_, handle)| *handle == &entity)
4265 else {
4266 return;
4267 };
4268 git_store.active_repo_id = Some(id);
4269 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4270 });
4271 }
4272
4273 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4274 self.snapshot.status()
4275 }
4276
4277 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4278 self.snapshot.diff_stat_for_path(path)
4279 }
4280
4281 pub fn cached_stash(&self) -> GitStash {
4282 self.snapshot.stash_entries.clone()
4283 }
4284
4285 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4286 let git_store = self.git_store.upgrade()?;
4287 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4288 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4289 let abs_path = SanitizedPath::new(&abs_path);
4290 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4291 Some(ProjectPath {
4292 worktree_id: worktree.read(cx).id(),
4293 path: relative_path,
4294 })
4295 }
4296
4297 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4298 let git_store = self.git_store.upgrade()?;
4299 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4300 let abs_path = worktree_store.absolutize(path, cx)?;
4301 self.snapshot.abs_path_to_repo_path(&abs_path)
4302 }
4303
4304 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4305 other
4306 .read(cx)
4307 .snapshot
4308 .work_directory_abs_path
4309 .starts_with(&self.snapshot.work_directory_abs_path)
4310 }
4311
    /// Opens (or returns the cached) buffer used for composing commit
    /// messages for this repository.
    ///
    /// Locally the buffer is created via `open_local_commit_buffer`; for
    /// remote (collab) repositories the host is asked to open it and the
    /// replica waits for the buffer to arrive, then applies the "Git Commit"
    /// language if a registry was provided. The resulting buffer is cached in
    /// `commit_message_buffer`.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the previously opened buffer.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Ask the host to open the commit buffer, then wait for
                    // the replicated buffer to show up in our buffer store.
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache so subsequent calls take the fast path above.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4364
4365 fn open_local_commit_buffer(
4366 language_registry: Option<Arc<LanguageRegistry>>,
4367 buffer_store: Entity<BufferStore>,
4368 cx: &mut Context<Self>,
4369 ) -> Task<Result<Entity<Buffer>>> {
4370 cx.spawn(async move |repository, cx| {
4371 let git_commit_language = match language_registry {
4372 Some(language_registry) => {
4373 Some(language_registry.language_for_name("Git Commit").await?)
4374 }
4375 None => None,
4376 };
4377 let buffer = buffer_store
4378 .update(cx, |buffer_store, cx| {
4379 buffer_store.create_buffer(git_commit_language, false, cx)
4380 })
4381 .await?;
4382
4383 repository.update(cx, |repository, _| {
4384 repository.commit_message_buffer = Some(buffer.clone());
4385 })?;
4386 Ok(buffer)
4387 })
4388 }
4389
    /// Restores `paths` to their contents at `commit`
    /// (the moral equivalent of `git checkout <commit> -- <paths>`).
    ///
    /// The operation is tracked as a pending `Reverted` op for the affected
    /// paths; locally it runs via the backend, remotely it is forwarded over
    /// RPC.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        // Status message displayed while the job is running.
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4444
    /// Resets the repository to `commit` using the given `reset_mode`.
    ///
    /// Local repositories delegate to the backend; remote ones forward a
    /// `GitReset` RPC, mapping `ResetMode` onto the proto enum (only `Soft`
    /// and `Mixed` exist at present — the match is exhaustive).
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4478
    /// Fetches the details (`CommitDetails`) of a single commit, locally via
    /// the backend or remotely via a `GitShow` RPC.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    // Rehydrate the proto response into the shared type.
                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4506
    /// Loads the full diff of `commit` (old/new text per file), locally via
    /// the backend or remotely via a `LoadCommitDiff` RPC.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                // Path decoding is fallible, so collect into
                                // a Result to surface the first bad entry.
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4542
4543 pub fn file_history(
4544 &mut self,
4545 path: RepoPath,
4546 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4547 self.file_history_paginated(path, 0, None)
4548 }
4549
    /// Fetches a page of the commit history for `path`, skipping the first
    /// `skip` entries and returning at most `limit` entries when set.
    ///
    /// Locally delegates to the backend; remotely issues a `GitFileHistory`
    /// RPC and rehydrates the proto response.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            // usize -> u64 widening is lossless on supported targets.
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4591
4592 pub fn get_graph_data(
4593 &self,
4594 log_source: LogSource,
4595 log_order: LogOrder,
4596 ) -> Option<&InitialGitGraphData> {
4597 self.initial_graph_data.get(&(log_source, log_order))
4598 }
4599
4600 pub fn search_commits(
4601 &mut self,
4602 log_source: LogSource,
4603 search_args: SearchCommitArgs,
4604 request_tx: smol::channel::Sender<Oid>,
4605 cx: &mut Context<Self>,
4606 ) {
4607 let repository_state = self.repository_state.clone();
4608
4609 cx.background_spawn(async move {
4610 let repo_state = repository_state.await;
4611
4612 match repo_state {
4613 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4614 backend
4615 .search_commits(log_source, search_args, request_tx)
4616 .await
4617 .log_err();
4618 }
4619 Ok(RepositoryState::Remote(_)) => {}
4620 Err(_) => {}
4621 };
4622 })
4623 .detach();
4624 }
4625
    /// Returns the slice of graph commit data for `range`, kicking off a
    /// background fetch for the (source, order) pair on first access.
    ///
    /// The returned response reports `is_loading` while the fetch task is
    /// still running and carries any error the fetch recorded. The requested
    /// range is clamped to the currently loaded commit count, so callers may
    /// receive fewer commits than asked for while data is streaming in.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                // First request for this key: spawn the fetch task. Its
                // handle is stored in the entry, so dropping the entry
                // cancels the fetch.
                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record a failure on the entry so the UI can surface it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the range to the data loaded so far. NOTE(review): a
        // reversed input range (start > end) would still panic on slicing,
        // as in any slice index — callers are expected to pass a well-formed
        // range.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4695
    /// Streams initial graph commit data from a local backend into this
    /// repository's `initial_graph_data` entry.
    ///
    /// The backend pushes batches of commits through an unbounded channel
    /// from a background task; each batch is appended to the entry's
    /// `commit_data` (with `commit_oid_to_index` kept in sync) and a
    /// `CountUpdated` event is emitted so views can refresh.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // Run the backend traversal off the main thread; it feeds batches
        // into `request_tx` and finishes with an overall result.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches until the sender side is dropped (traversal done or
        // failed).
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            // Index before push: the new commit's index is
                            // the current length.
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                // The entry owns this task, so it should always exist while
                // we are running; a vacant entry indicates a logic error.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Surface any traversal error after the stream ends.
        task.await?;
        Ok(())
    }
4749
    /// Returns the cached state of `sha`'s commit data, requesting a load if
    /// it is not cached yet.
    ///
    /// When the commit-data handler is `Open`, the sha is enqueued and marked
    /// `Loading`. When it is `Closed`, the handler is (re)opened — the sha is
    /// NOT enqueued on this call; a subsequent call will send it once the
    /// handler is open. While `Starting`, nothing is enqueued either. In all
    /// not-yet-loaded cases the returned state is `Loading`.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark as Loading if the request was actually
                    // accepted by the channel.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4770
    /// Spins up the commit-data handler: a background reader that serves
    /// commit-data requests plus a foreground task that installs results.
    ///
    /// Requests flow in through an unbounded channel; results come back
    /// through a bounded (64) channel. The background loop exits — closing
    /// the handler — when the request channel is dropped, the result channel
    /// is closed, or no request arrives for 10 seconds (idle timeout). The
    /// foreground task then flips the handler state back to `Closed` so a
    /// later `fetch_commit_data` can reopen it.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        // Foreground task: install loaded commit data on the entity and
        // notify observers; marks the handler Closed when results dry up.
        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                // Entity is gone; stop processing results.
                if result.is_err() {
                    break;
                }
            }

            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        // Background task: resolve the repository, create a reader, and
        // serve requests until closed or idle.
        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Fresh idle timer per iteration; firing it closes the
                // handler.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Failed reads are logged and skipped; the
                                // entry stays in the Loading state.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Close the result channel so the foreground task can finish.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4861
4862 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4863 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4864 }
4865
    /// Collects save tasks for every open buffer among `entries` that exists
    /// on disk and has unsaved edits.
    ///
    /// Paths that don't map into the project, or whose buffers are not open,
    /// are skipped. The caller is responsible for awaiting the returned
    /// tasks.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    // Only save buffers that are backed by an existing file
                    // and are actually dirty.
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
4892
4893 pub fn stage_entries(
4894 &mut self,
4895 entries: Vec<RepoPath>,
4896 cx: &mut Context<Self>,
4897 ) -> Task<anyhow::Result<()>> {
4898 self.stage_or_unstage_entries(true, entries, cx)
4899 }
4900
4901 pub fn unstage_entries(
4902 &mut self,
4903 entries: Vec<RepoPath>,
4904 cx: &mut Context<Self>,
4905 ) -> Task<anyhow::Result<()>> {
4906 self.stage_or_unstage_entries(false, entries, cx)
4907 }
4908
    /// Stages or unstages `entries`, keeping the in-memory diff state in sync
    /// with the index write.
    ///
    /// Sequence: (1) save any dirty open buffers for the paths so on-disk
    /// contents match; (2) optimistically flip all hunks in each path's
    /// uncommitted diff and bump its `hunk_staging_operation_count`; (3)
    /// perform the index write (backend locally, `Stage`/`Unstage` RPC
    /// remotely); (4) on success record the operation count as written, on
    /// failure clear the pending hunks to roll the optimistic update back.
    /// The job is keyed on the path set so writes to the same paths are
    /// serialized.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush dirty buffers before touching the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically toggle hunks in each affected
                            // buffer's uncommitted diff, collecting per-diff
                            // operation counts for later reconciliation.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // The actual index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic hunk updates with the
                            // outcome of the index write.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            // Roll back: drop the pending
                                            // hunk states we set above.
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5093
5094 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5095 let snapshot = self.snapshot.clone();
5096 let pending_ops = self.pending_ops.clone();
5097 let to_stage = cx.background_spawn(async move {
5098 snapshot
5099 .status()
5100 .filter_map(|entry| {
5101 if let Some(ops) =
5102 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5103 {
5104 if ops.staging() || ops.staged() {
5105 None
5106 } else {
5107 Some(entry.repo_path)
5108 }
5109 } else if entry.status.staging().is_fully_staged() {
5110 None
5111 } else {
5112 Some(entry.repo_path)
5113 }
5114 })
5115 .collect()
5116 });
5117
5118 cx.spawn(async move |this, cx| {
5119 let to_stage = to_stage.await;
5120 this.update(cx, |this, cx| {
5121 this.stage_or_unstage_entries(true, to_stage, cx)
5122 })?
5123 .await
5124 })
5125 }
5126
5127 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5128 let snapshot = self.snapshot.clone();
5129 let pending_ops = self.pending_ops.clone();
5130 let to_unstage = cx.background_spawn(async move {
5131 snapshot
5132 .status()
5133 .filter_map(|entry| {
5134 if let Some(ops) =
5135 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5136 {
5137 if !ops.staging() && !ops.staged() {
5138 None
5139 } else {
5140 Some(entry.repo_path)
5141 }
5142 } else if entry.status.staging().is_fully_unstaged() {
5143 None
5144 } else {
5145 Some(entry.repo_path)
5146 }
5147 })
5148 .collect()
5149 });
5150
5151 cx.spawn(async move |this, cx| {
5152 let to_unstage = to_unstage.await;
5153 this.update(cx, |this, cx| {
5154 this.stage_or_unstage_entries(false, to_unstage, cx)
5155 })?
5156 .await
5157 })
5158 }
5159
5160 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5161 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5162
5163 self.stash_entries(to_stash, cx)
5164 }
5165
    /// Stashes the given paths (`git stash push -- <paths>` semantics via the
    /// backend), or forwards a `Stash` RPC for remote repositories.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` — entity update; second — job result.
            .await??;
            Ok(())
        })
    }
5202
    /// Pops a stash entry (`git stash pop`), targeting `index` when given,
    /// otherwise the backend's default (the latest entry).
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5236
    /// Applies a stash entry (`git stash apply`) without dropping it,
    /// targeting `index` when given.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5270
5271 pub fn stash_drop(
5272 &mut self,
5273 index: Option<usize>,
5274 cx: &mut Context<Self>,
5275 ) -> oneshot::Receiver<anyhow::Result<()>> {
5276 let id = self.id;
5277 let updates_tx = self
5278 .git_store()
5279 .and_then(|git_store| match &git_store.read(cx).state {
5280 GitStoreState::Local { downstream, .. } => downstream
5281 .as_ref()
5282 .map(|downstream| downstream.updates_tx.clone()),
5283 _ => None,
5284 });
5285 let this = cx.weak_entity();
5286 self.send_job(None, move |git_repo, mut cx| async move {
5287 match git_repo {
5288 RepositoryState::Local(LocalRepositoryState {
5289 backend,
5290 environment,
5291 ..
5292 }) => {
5293 // TODO would be nice to not have to do this manually
5294 let result = backend.stash_drop(index, environment).await;
5295 if result.is_ok()
5296 && let Ok(stash_entries) = backend.stash_entries().await
5297 {
5298 let snapshot = this.update(&mut cx, |this, cx| {
5299 this.snapshot.stash_entries = stash_entries;
5300 cx.emit(RepositoryEvent::StashEntriesChanged);
5301 this.snapshot.clone()
5302 })?;
5303 if let Some(updates_tx) = updates_tx {
5304 updates_tx
5305 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5306 .ok();
5307 }
5308 }
5309
5310 result
5311 }
5312 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5313 client
5314 .request(proto::StashDrop {
5315 project_id: project_id.0,
5316 repository_id: id.to_proto(),
5317 stash_index: index.map(|i| i as u64),
5318 })
5319 .await
5320 .context("sending stash pop request")?;
5321 Ok(())
5322 }
5323 }
5324 })
5325 }
5326
    /// Runs the given git hook, locally via the backend or remotely via a
    /// `RunGitHook` RPC. The hook name is shown as the job's status message.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5353
    /// Creates a commit with the given message and options.
    ///
    /// The pre-commit hook is started immediately and the commit job awaits
    /// its result first, so a failing hook aborts the commit. For remote
    /// repositories, the askpass delegate is registered under a fresh id for
    /// the duration of the request so the host can relay credential prompts
    /// back to this client.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Kick off the pre-commit hook now; awaited inside the job below.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Double `?`: outer for a canceled hook channel, inner for hook failure.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate when the request finishes,
                    // regardless of success or failure.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5409
    /// Fetches from a remote (`git fetch`), relaying credential prompts
    /// through `askpass`.
    ///
    /// For remote repositories, the askpass delegate is registered under a
    /// fresh id for the lifetime of the request so the host can route
    /// credential prompts back to this client.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate once the request completes,
                    // whether it succeeded or failed.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5451
5452 pub fn push(
5453 &mut self,
5454 branch: SharedString,
5455 remote_branch: SharedString,
5456 remote: SharedString,
5457 options: Option<PushOptions>,
5458 askpass: AskPassDelegate,
5459 cx: &mut Context<Self>,
5460 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5461 let askpass_delegates = self.askpass_delegates.clone();
5462 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5463 let id = self.id;
5464
5465 let args = options
5466 .map(|option| match option {
5467 PushOptions::SetUpstream => " --set-upstream",
5468 PushOptions::Force => " --force-with-lease",
5469 })
5470 .unwrap_or("");
5471
5472 let updates_tx = self
5473 .git_store()
5474 .and_then(|git_store| match &git_store.read(cx).state {
5475 GitStoreState::Local { downstream, .. } => downstream
5476 .as_ref()
5477 .map(|downstream| downstream.updates_tx.clone()),
5478 _ => None,
5479 });
5480
5481 let this = cx.weak_entity();
5482 self.send_job(
5483 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5484 move |git_repo, mut cx| async move {
5485 match git_repo {
5486 RepositoryState::Local(LocalRepositoryState {
5487 backend,
5488 environment,
5489 ..
5490 }) => {
5491 let result = backend
5492 .push(
5493 branch.to_string(),
5494 remote_branch.to_string(),
5495 remote.to_string(),
5496 options,
5497 askpass,
5498 environment.clone(),
5499 cx.clone(),
5500 )
5501 .await;
5502 // TODO would be nice to not have to do this manually
5503 if result.is_ok() {
5504 let branches = backend.branches().await?;
5505 let branch = branches.into_iter().find(|branch| branch.is_head);
5506 log::info!("head branch after scan is {branch:?}");
5507 let snapshot = this.update(&mut cx, |this, cx| {
5508 this.snapshot.branch = branch;
5509 cx.emit(RepositoryEvent::BranchChanged);
5510 this.snapshot.clone()
5511 })?;
5512 if let Some(updates_tx) = updates_tx {
5513 updates_tx
5514 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5515 .ok();
5516 }
5517 }
5518 result
5519 }
5520 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5521 askpass_delegates.lock().insert(askpass_id, askpass);
5522 let _defer = util::defer(|| {
5523 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5524 debug_assert!(askpass_delegate.is_some());
5525 });
5526 let response = client
5527 .request(proto::Push {
5528 project_id: project_id.0,
5529 repository_id: id.to_proto(),
5530 askpass_id,
5531 branch_name: branch.to_string(),
5532 remote_branch_name: remote_branch.to_string(),
5533 remote_name: remote.to_string(),
5534 options: options.map(|options| match options {
5535 PushOptions::Force => proto::push::PushOptions::Force,
5536 PushOptions::SetUpstream => {
5537 proto::push::PushOptions::SetUpstream
5538 }
5539 }
5540 as i32),
5541 })
5542 .await?;
5543
5544 Ok(RemoteCommandOutput {
5545 stdout: response.stdout,
5546 stderr: response.stderr,
5547 })
5548 }
5549 }
5550 },
5551 )
5552 }
5553
    /// Pulls from `remote` (optionally a specific `branch`), passing
    /// `--rebase` when requested, and relaying credential prompts through
    /// `askpass`.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Assemble the human-readable status line mirroring the CLI invocation.
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate when the request completes.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5618
    /// Writes `content` as the index (staged) text for `path`; a `None`
    /// content presumably unstages/removes the entry — confirm against the
    /// backend's `set_index_text` contract.
    ///
    /// Keyed on `GitJobKey::WriteIndex(path)` so queued writes to the same
    /// path can be deduplicated by the job worker. After the write, records
    /// `hunk_staging_operation_count` on the buffer's diff state (when
    /// provided) so diff recomputation can tell which hunk-staging operations
    /// this index write already reflects.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the working file's executable bit in the
                        // index; missing files or metadata errors are treated
                        // as non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    // Best-effort: if the buffer or its diff state is gone,
                    // the inner closure short-circuits via `?` and we skip
                    // the bookkeeping without failing the job.
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5697
5698 pub fn create_remote(
5699 &mut self,
5700 remote_name: String,
5701 remote_url: String,
5702 ) -> oneshot::Receiver<Result<()>> {
5703 let id = self.id;
5704 self.send_job(
5705 Some(format!("git remote add {remote_name} {remote_url}").into()),
5706 move |repo, _cx| async move {
5707 match repo {
5708 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5709 backend.create_remote(remote_name, remote_url).await
5710 }
5711 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5712 client
5713 .request(proto::GitCreateRemote {
5714 project_id: project_id.0,
5715 repository_id: id.to_proto(),
5716 remote_name,
5717 remote_url,
5718 })
5719 .await?;
5720
5721 Ok(())
5722 }
5723 }
5724 },
5725 )
5726 }
5727
5728 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5729 let id = self.id;
5730 self.send_job(
5731 Some(format!("git remove remote {remote_name}").into()),
5732 move |repo, _cx| async move {
5733 match repo {
5734 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5735 backend.remove_remote(remote_name).await
5736 }
5737 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5738 client
5739 .request(proto::GitRemoveRemote {
5740 project_id: project_id.0,
5741 repository_id: id.to_proto(),
5742 remote_name,
5743 })
5744 .await?;
5745
5746 Ok(())
5747 }
5748 }
5749 },
5750 )
5751 }
5752
5753 pub fn get_remotes(
5754 &mut self,
5755 branch_name: Option<String>,
5756 is_push: bool,
5757 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5758 let id = self.id;
5759 self.send_job(None, move |repo, _cx| async move {
5760 match repo {
5761 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5762 let remote = if let Some(branch_name) = branch_name {
5763 if is_push {
5764 backend.get_push_remote(branch_name).await?
5765 } else {
5766 backend.get_branch_remote(branch_name).await?
5767 }
5768 } else {
5769 None
5770 };
5771
5772 match remote {
5773 Some(remote) => Ok(vec![remote]),
5774 None => backend.get_all_remotes().await,
5775 }
5776 }
5777 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5778 let response = client
5779 .request(proto::GetRemotes {
5780 project_id: project_id.0,
5781 repository_id: id.to_proto(),
5782 branch_name,
5783 is_push,
5784 })
5785 .await?;
5786
5787 let remotes = response
5788 .remotes
5789 .into_iter()
5790 .map(|remotes| Remote {
5791 name: remotes.name.into(),
5792 })
5793 .collect();
5794
5795 Ok(remotes)
5796 }
5797 }
5798 })
5799 }
5800
5801 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5802 let id = self.id;
5803 self.send_job(None, move |repo, _| async move {
5804 match repo {
5805 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5806 backend.branches().await
5807 }
5808 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5809 let response = client
5810 .request(proto::GitGetBranches {
5811 project_id: project_id.0,
5812 repository_id: id.to_proto(),
5813 })
5814 .await?;
5815
5816 let branches = response
5817 .branches
5818 .into_iter()
5819 .map(|branch| proto_to_branch(&branch))
5820 .collect();
5821
5822 Ok(branches)
5823 }
5824 }
5825 })
5826 }
5827
5828 /// If this is a linked worktree (*NOT* the main checkout of a repository),
5829 /// returns the pathed for the linked worktree.
5830 ///
5831 /// Returns None if this is the main checkout.
5832 pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
5833 if self.work_directory_abs_path != self.original_repo_abs_path {
5834 Some(&self.work_directory_abs_path)
5835 } else {
5836 None
5837 }
5838 }
5839
5840 pub fn path_for_new_linked_worktree(
5841 &self,
5842 branch_name: &str,
5843 worktree_directory_setting: &str,
5844 ) -> Result<PathBuf> {
5845 let original_repo = self.original_repo_abs_path.clone();
5846 let project_name = original_repo
5847 .file_name()
5848 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5849 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5850 Ok(directory.join(branch_name).join(project_name))
5851 }
5852
5853 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5854 let id = self.id;
5855 self.send_job(None, move |repo, _| async move {
5856 match repo {
5857 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5858 backend.worktrees().await
5859 }
5860 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5861 let response = client
5862 .request(proto::GitGetWorktrees {
5863 project_id: project_id.0,
5864 repository_id: id.to_proto(),
5865 })
5866 .await?;
5867
5868 let worktrees = response
5869 .worktrees
5870 .into_iter()
5871 .map(|worktree| proto_to_worktree(&worktree))
5872 .collect();
5873
5874 Ok(worktrees)
5875 }
5876 }
5877 })
5878 }
5879
5880 pub fn create_worktree(
5881 &mut self,
5882 branch_name: String,
5883 path: PathBuf,
5884 commit: Option<String>,
5885 ) -> oneshot::Receiver<Result<()>> {
5886 let id = self.id;
5887 self.send_job(
5888 Some(format!("git worktree add: {}", branch_name).into()),
5889 move |repo, _cx| async move {
5890 match repo {
5891 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5892 backend
5893 .create_worktree(Some(branch_name), path, commit)
5894 .await
5895 }
5896 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5897 client
5898 .request(proto::GitCreateWorktree {
5899 project_id: project_id.0,
5900 repository_id: id.to_proto(),
5901 name: branch_name,
5902 directory: path.to_string_lossy().to_string(),
5903 commit,
5904 })
5905 .await?;
5906
5907 Ok(())
5908 }
5909 }
5910 },
5911 )
5912 }
5913
5914 pub fn create_worktree_detached(
5915 &mut self,
5916 path: PathBuf,
5917 commit: String,
5918 ) -> oneshot::Receiver<Result<()>> {
5919 self.send_job(
5920 Some("git worktree add (detached)".into()),
5921 move |repo, _cx| async move {
5922 match repo {
5923 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5924 backend.create_worktree(None, path, Some(commit)).await
5925 }
5926 RepositoryState::Remote(_) => {
5927 anyhow::bail!(
5928 "create_worktree_detached is not supported for remote repositories"
5929 )
5930 }
5931 }
5932 },
5933 )
5934 }
5935
5936 pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
5937 self.send_job(None, move |repo, _cx| async move {
5938 match repo {
5939 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5940 Ok(backend.head_sha().await)
5941 }
5942 RepositoryState::Remote(_) => {
5943 anyhow::bail!("head_sha is not supported for remote repositories")
5944 }
5945 }
5946 })
5947 }
5948
5949 pub fn update_ref(
5950 &mut self,
5951 ref_name: String,
5952 commit: String,
5953 ) -> oneshot::Receiver<Result<()>> {
5954 self.send_job(None, move |repo, _cx| async move {
5955 match repo {
5956 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5957 backend.update_ref(ref_name, commit).await
5958 }
5959 RepositoryState::Remote(_) => {
5960 anyhow::bail!("update_ref is not supported for remote repositories")
5961 }
5962 }
5963 })
5964 }
5965
5966 pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
5967 self.send_job(None, move |repo, _cx| async move {
5968 match repo {
5969 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5970 backend.delete_ref(ref_name).await
5971 }
5972 RepositoryState::Remote(_) => {
5973 anyhow::bail!("delete_ref is not supported for remote repositories")
5974 }
5975 }
5976 })
5977 }
5978
5979 pub fn stage_all_including_untracked(&mut self) -> oneshot::Receiver<Result<()>> {
5980 self.send_job(None, move |repo, _cx| async move {
5981 match repo {
5982 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5983 backend.stage_all_including_untracked().await
5984 }
5985 RepositoryState::Remote(_) => {
5986 anyhow::bail!(
5987 "stage_all_including_untracked is not supported for remote repositories"
5988 )
5989 }
5990 }
5991 })
5992 }
5993
5994 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5995 let id = self.id;
5996 self.send_job(
5997 Some(format!("git worktree remove: {}", path.display()).into()),
5998 move |repo, _cx| async move {
5999 match repo {
6000 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6001 backend.remove_worktree(path, force).await
6002 }
6003 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6004 client
6005 .request(proto::GitRemoveWorktree {
6006 project_id: project_id.0,
6007 repository_id: id.to_proto(),
6008 path: path.to_string_lossy().to_string(),
6009 force,
6010 })
6011 .await?;
6012
6013 Ok(())
6014 }
6015 }
6016 },
6017 )
6018 }
6019
6020 pub fn rename_worktree(
6021 &mut self,
6022 old_path: PathBuf,
6023 new_path: PathBuf,
6024 ) -> oneshot::Receiver<Result<()>> {
6025 let id = self.id;
6026 self.send_job(
6027 Some(format!("git worktree move: {}", old_path.display()).into()),
6028 move |repo, _cx| async move {
6029 match repo {
6030 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6031 backend.rename_worktree(old_path, new_path).await
6032 }
6033 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6034 client
6035 .request(proto::GitRenameWorktree {
6036 project_id: project_id.0,
6037 repository_id: id.to_proto(),
6038 old_path: old_path.to_string_lossy().to_string(),
6039 new_path: new_path.to_string_lossy().to_string(),
6040 })
6041 .await?;
6042
6043 Ok(())
6044 }
6045 }
6046 },
6047 )
6048 }
6049
6050 pub fn default_branch(
6051 &mut self,
6052 include_remote_name: bool,
6053 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
6054 let id = self.id;
6055 self.send_job(None, move |repo, _| async move {
6056 match repo {
6057 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6058 backend.default_branch(include_remote_name).await
6059 }
6060 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6061 let response = client
6062 .request(proto::GetDefaultBranch {
6063 project_id: project_id.0,
6064 repository_id: id.to_proto(),
6065 })
6066 .await?;
6067
6068 anyhow::Ok(response.branch.map(SharedString::from))
6069 }
6070 }
6071 })
6072 }
6073
    /// Computes a tree-level diff (per-path added/modified/deleted statuses)
    /// for the given `diff_type`.
    ///
    /// Locally this delegates to the backend. For remote repositories it
    /// issues a `GetTreeDiff` request and converts the proto entries back
    /// into `TreeDiff` statuses; entries whose path or oid fails to parse
    /// are logged (via `log_err`) and silently dropped from the result.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            // Added entries carry no old oid; Modified and
                            // Deleted must include the previous blob's oid.
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6133
    /// Produces a textual diff for the given `diff_type` (HEAD-to-index,
    /// HEAD-to-worktree, or against a merge base).
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Translate the diff type into its proto enum; only the
                    // merge-base variant carries an extra ref name.
                    let (proto_diff_type, merge_base_ref) = match &diff_type {
                        DiffType::HeadToIndex => {
                            (proto::git_diff::DiffType::HeadToIndex.into(), None)
                        }
                        DiffType::HeadToWorktree => {
                            (proto::git_diff::DiffType::HeadToWorktree.into(), None)
                        }
                        DiffType::MergeBase { base_ref } => (
                            proto::git_diff::DiffType::MergeBase.into(),
                            Some(base_ref.to_string()),
                        ),
                    };
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            diff_type: proto_diff_type,
                            merge_base_ref,
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
6168
    /// Creates and switches to a new branch (`git switch -c`), optionally
    /// starting from `base_branch`.
    ///
    /// NOTE(review): on the remote path, `base_branch` is not forwarded in
    /// `proto::GitCreateBranch` — it only affects the status text and the
    /// local backend call. Confirm whether the proto message should carry
    /// the base branch.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6199
6200 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6201 let id = self.id;
6202 self.send_job(
6203 Some(format!("git switch {branch_name}").into()),
6204 move |repo, _cx| async move {
6205 match repo {
6206 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6207 backend.change_branch(branch_name).await
6208 }
6209 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6210 client
6211 .request(proto::GitChangeBranch {
6212 project_id: project_id.0,
6213 repository_id: id.to_proto(),
6214 branch_name,
6215 })
6216 .await?;
6217
6218 Ok(())
6219 }
6220 }
6221 },
6222 )
6223 }
6224
6225 pub fn delete_branch(
6226 &mut self,
6227 is_remote: bool,
6228 branch_name: String,
6229 ) -> oneshot::Receiver<Result<()>> {
6230 let id = self.id;
6231 self.send_job(
6232 Some(
6233 format!(
6234 "git branch {} {}",
6235 if is_remote { "-dr" } else { "-d" },
6236 branch_name
6237 )
6238 .into(),
6239 ),
6240 move |repo, _cx| async move {
6241 match repo {
6242 RepositoryState::Local(state) => {
6243 state.backend.delete_branch(is_remote, branch_name).await
6244 }
6245 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6246 client
6247 .request(proto::GitDeleteBranch {
6248 project_id: project_id.0,
6249 repository_id: id.to_proto(),
6250 is_remote,
6251 branch_name,
6252 })
6253 .await?;
6254
6255 Ok(())
6256 }
6257 }
6258 },
6259 )
6260 }
6261
6262 pub fn rename_branch(
6263 &mut self,
6264 branch: String,
6265 new_name: String,
6266 ) -> oneshot::Receiver<Result<()>> {
6267 let id = self.id;
6268 self.send_job(
6269 Some(format!("git branch -m {branch} {new_name}").into()),
6270 move |repo, _cx| async move {
6271 match repo {
6272 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6273 backend.rename_branch(branch, new_name).await
6274 }
6275 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6276 client
6277 .request(proto::GitRenameBranch {
6278 project_id: project_id.0,
6279 repository_id: id.to_proto(),
6280 branch,
6281 new_name,
6282 })
6283 .await?;
6284
6285 Ok(())
6286 }
6287 }
6288 },
6289 )
6290 }
6291
6292 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6293 let id = self.id;
6294 self.send_job(None, move |repo, _cx| async move {
6295 match repo {
6296 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6297 backend.check_for_pushed_commit().await
6298 }
6299 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6300 let response = client
6301 .request(proto::CheckForPushedCommits {
6302 project_id: project_id.0,
6303 repository_id: id.to_proto(),
6304 })
6305 .await?;
6306
6307 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6308
6309 Ok(branches)
6310 }
6311 }
6312 })
6313 }
6314
6315 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6316 self.send_job(None, |repo, _cx| async move {
6317 match repo {
6318 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6319 backend.checkpoint().await
6320 }
6321 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6322 }
6323 })
6324 }
6325
6326 pub fn restore_checkpoint(
6327 &mut self,
6328 checkpoint: GitRepositoryCheckpoint,
6329 ) -> oneshot::Receiver<Result<()>> {
6330 self.send_job(None, move |repo, _cx| async move {
6331 match repo {
6332 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6333 backend.restore_checkpoint(checkpoint).await
6334 }
6335 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6336 }
6337 })
6338 }
6339
    /// Reconciles this (downstream) repository's snapshot with an
    /// `UpdateRepository` message from the upstream host, emitting change
    /// events only for the parts that actually differ.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch / head commit: compare before overwriting so the event
        // only fires on a real change.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash list: entries that fail to deserialize are silently skipped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Statuses arrive as a delta: removals first, then inserts/updates,
        // applied as a single batched edit on the status tree.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // The scan id is only advanced once the final message of a scan's
        // update sequence has been applied.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6423
6424 pub fn compare_checkpoints(
6425 &mut self,
6426 left: GitRepositoryCheckpoint,
6427 right: GitRepositoryCheckpoint,
6428 ) -> oneshot::Receiver<Result<bool>> {
6429 self.send_job(None, move |repo, _cx| async move {
6430 match repo {
6431 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6432 backend.compare_checkpoints(left, right).await
6433 }
6434 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6435 }
6436 })
6437 }
6438
6439 pub fn diff_checkpoints(
6440 &mut self,
6441 base_checkpoint: GitRepositoryCheckpoint,
6442 target_checkpoint: GitRepositoryCheckpoint,
6443 ) -> oneshot::Receiver<Result<String>> {
6444 self.send_job(None, move |repo, _cx| async move {
6445 match repo {
6446 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6447 backend
6448 .diff_checkpoints(base_checkpoint, target_checkpoint)
6449 .await
6450 }
6451 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6452 }
6453 })
6454 }
6455
    /// Drops pending git operations that are no longer running, keeping only
    /// path entries that still have at least one in-flight op.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the tree, retaining only still-running ops per path.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *previous* pending_ops, not
            // the filtered set assigned below — confirm whether subscribers
            // expect the old or the new collection here.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6481
    /// Schedules a full git state scan on the repository's job queue.
    ///
    /// The job is keyed with `GitJobKey::ReloadGitState`, so when several
    /// scans pile up only the most recently queued one actually runs (see the
    /// dedup logic in the job worker loop). Only valid for local
    /// repositories. When `updates_tx` is provided, the freshly computed
    /// snapshot is forwarded downstream.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // If the repository entity was dropped there is nothing to do.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // A full scan supersedes any optimistic pending ops.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6513
    /// Spawns the background worker that executes git jobs for a local
    /// repository, returning the sender used to enqueue jobs.
    ///
    /// Jobs run one at a time in FIFO order, with one exception: a *keyed*
    /// job is skipped when a newer job with the same key is already waiting
    /// in the queue, so only the latest of a burst actually runs.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // processing any jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain all immediately-available jobs so the keyed
                // de-duplication below can see the whole backlog.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job if a newer one with the same key is
                    // queued behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped: shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6559
    /// Spawns the background worker that executes git jobs for a remote
    /// repository, returning the sender used to enqueue jobs.
    ///
    /// The job loop is identical to `spawn_local_git_worker`, minus the
    /// backend initialization and hosting-provider registration: FIFO
    /// execution where a keyed job is skipped if a newer job with the same
    /// key is already queued.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain all immediately-available jobs so the keyed
                // de-duplication below can see the whole backlog.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job if a newer one with the same key is
                    // queued behind it.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped: shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6595
    /// Loads the staged (index) text for the file at `repo_path`.
    ///
    /// Locally this reads from the git index; for remote repositories it asks
    /// the host via an `OpenUnstagedDiff` request. Resolves to `None` when
    /// there is no staged content for the path.
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        // Flatten the oneshot's cancellation error into the task's Result.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6620
    /// Loads the committed (HEAD) and staged (index) texts for the file at
    /// `repo_path`, collapsed into a `DiffBasesChange`.
    ///
    /// When the index matches HEAD, a single `SetBoth` value is produced so
    /// the caller can share one diff base; otherwise both texts are carried
    /// separately in `SetEach`. Remote repositories fetch the same data via
    /// an `OpenUncommittedDiff` request.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The host tells us whether index and HEAD coincide so we
                    // don't have to compare the texts ourselves.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        // Flatten the oneshot's cancellation error into the task's Result.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6666
    /// Loads the content of the git blob identified by `oid`.
    ///
    /// Locally this reads the object from the repository backend; for remote
    /// repositories it issues a `GetBlobContent` request to the host.
    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
        let repository_id = self.snapshot.id;
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_blob_content(oid).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetBlobContent {
                            project_id: project_id.to_proto(),
                            repository_id: repository_id.0,
                            oid: oid.to_string(),
                        })
                        .await?;
                    Ok(response.content)
                }
            }
        });
        // Flatten the oneshot's cancellation error into the task's Result.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
6688
    /// Refreshes git status for a batch of changed paths.
    ///
    /// Paths accumulate in `paths_needing_status_update` and are processed by
    /// a single keyed job (`GitJobKey::RefreshStatuses`), so bursts of change
    /// notifications collapse into one refresh. Only valid for local
    /// repositories. Emits `StatusesChanged`/`StashEntriesChanged` as needed
    /// and forwards the updated snapshot downstream via `updates_tx`.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Snapshot the current state and take ownership of all queued
                // path batches; a later queued refresh will then see an empty
                // list and return early.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                // Without a HEAD commit there is nothing to diff against.
                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            // Paths still left in `changed_paths` after this
                            // loop have no reported status and are candidates
                            // for removal below.
                            changed_paths.remove(repo_path);
                            // Skip entries identical to the previous snapshot
                            // so the edit list stays minimal.
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Remove stale entries for paths git no longer
                        // reports a status for.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6801
6802 /// currently running git command and when it started
6803 pub fn current_job(&self) -> Option<JobInfo> {
6804 self.active_jobs.values().next().cloned()
6805 }
6806
6807 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
6808 self.send_job(None, |_, _| async {})
6809 }
6810
    /// Runs `f` while tracking it as a pending git operation on `paths`.
    ///
    /// A `PendingOp` with status `Running` is recorded for every path up
    /// front (for optimistic UI state); once `f` resolves, each op is marked
    /// `Finished`, `Skipped` (on cancellation), or `Error`.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // Cancellation is not surfaced as an error to the caller.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    // Re-read each path's op list: it may have gained entries
                    // while this job was running, so only this op's status is
                    // updated in place.
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6850
    /// Records a new `Running` pending op for each path and returns the
    /// `(op id, path)` pairs so the caller can resolve their statuses later.
    ///
    /// Ids are allocated per path as `max existing id + 1`, so an id is only
    /// unique within a single path's op list.
    fn new_pending_ops_for_paths(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
    ) -> Vec<(PendingOpId, RepoPath)> {
        let mut edits = Vec::with_capacity(paths.len());
        let mut ids = Vec::with_capacity(paths.len());
        for path in paths {
            // Extend the path's existing op list, or start a fresh one.
            let mut ops = self
                .pending_ops
                .get(&PathKey(path.as_ref().clone()), ())
                .cloned()
                .unwrap_or_else(|| PendingOps::new(&path));
            let id = ops.max_id() + 1;
            ops.ops.push(PendingOp {
                id,
                git_status,
                job_status: pending_op::JobStatus::Running,
            });
            edits.push(sum_tree::Edit::Insert(ops));
            ids.push((id, path));
        }
        self.pending_ops.edit(edits, ());
        ids
    }
6876 pub fn default_remote_url(&self) -> Option<String> {
6877 self.remote_upstream_url
6878 .clone()
6879 .or(self.remote_origin_url.clone())
6880 }
6881}
6882
6883/// If `path` is a git linked worktree checkout, resolves it to the main
6884/// repository's working directory path. Returns `None` if `path` is a normal
6885/// repository, not a git repo, or if resolution fails.
6886///
6887/// Resolution works by:
6888/// 1. Reading the `.git` file to get the `gitdir:` pointer
6889/// 2. Following that to the worktree-specific git directory
6890/// 3. Reading the `commondir` file to find the shared `.git` directory
6891/// 4. Deriving the main repo's working directory from the common dir
6892pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
6893 let dot_git = path.join(".git");
6894 let metadata = fs.metadata(&dot_git).await.ok()??;
6895 if metadata.is_dir {
6896 return None; // Normal repo, not a linked worktree
6897 }
6898 // It's a .git file — parse the gitdir: pointer
6899 let content = fs.load(&dot_git).await.ok()?;
6900 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
6901 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
6902 // Read commondir to find the main .git directory
6903 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
6904 let common_dir = fs
6905 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
6906 .await
6907 .ok()?;
6908 Some(git::repository::original_repo_path_from_common_dir(
6909 &common_dir,
6910 ))
6911}
6912
/// Validates that the resolved worktree directory is acceptable:
/// - The setting must not be an absolute path.
/// - The resolved path must be either a subdirectory of the working
/// directory or a subdirectory of its parent (i.e., a sibling).
///
/// Returns `Ok(resolved_path)` or an error with a user-facing message.
pub fn worktrees_directory_for_repo(
    original_repo_abs_path: &Path,
    worktree_directory_setting: &str,
) -> Result<PathBuf> {
    // Check the original setting before trimming, since a path like "///"
    // is absolute but becomes "" after stripping trailing separators.
    // Also check for leading `/` or `\` explicitly, because on Windows
    // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
    // would slip through even though it's clearly not a relative path.
    if Path::new(worktree_directory_setting).is_absolute()
        || worktree_directory_setting.starts_with('/')
        || worktree_directory_setting.starts_with('\\')
    {
        anyhow::bail!(
            "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
        );
    }

    if worktree_directory_setting.is_empty() {
        anyhow::bail!("git.worktree_directory must not be empty");
    }

    let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
    if trimmed == ".." {
        anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
    }

    let joined = original_repo_abs_path.join(trimmed);
    let resolved = util::normalize_path(&joined);
    // When the setting escapes the repo (e.g. "../worktrees"), the repo's own
    // directory name is appended — presumably so repos sharing the same
    // setting get distinct sibling directories; confirm against callers.
    let resolved = if resolved.starts_with(original_repo_abs_path) {
        resolved
    } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
        resolved.join(repo_dir_name)
    } else {
        resolved
    };

    let parent = original_repo_abs_path
        .parent()
        .unwrap_or(original_repo_abs_path);

    if !resolved.starts_with(parent) {
        anyhow::bail!(
            "git.worktree_directory resolved to {resolved:?}, which is outside \
            the project root and its parent directory. It must resolve to a \
            subdirectory of {original_repo_abs_path:?} or a sibling of it."
        );
    }

    Ok(resolved)
}
6970
6971/// Returns a short name for a linked worktree suitable for UI display
6972///
6973/// Uses the main worktree path to come up with a short name that disambiguates
6974/// the linked worktree from the main worktree.
6975pub fn linked_worktree_short_name(
6976 main_worktree_path: &Path,
6977 linked_worktree_path: &Path,
6978) -> Option<SharedString> {
6979 if main_worktree_path == linked_worktree_path {
6980 return None;
6981 }
6982
6983 let project_name = main_worktree_path.file_name()?.to_str()?;
6984 let directory_name = linked_worktree_path.file_name()?.to_str()?;
6985 let name = if directory_name != project_name {
6986 directory_name.to_string()
6987 } else {
6988 linked_worktree_path
6989 .parent()?
6990 .file_name()?
6991 .to_str()?
6992 .to_string()
6993 };
6994 Some(name.into())
6995}
6996
/// Builds a permalink for a file inside a vendored crate from the Cargo
/// registry source cache.
///
/// Published crates ship a `.cargo_vcs_info.json` recording the exact git
/// sha and the crate's path within its repository; combined with the
/// `package.repository` field of `Cargo.toml`, that is enough to construct a
/// permalink to the original source on its hosting provider.
///
/// # Errors
/// Fails when no `.cargo_vcs_info.json` is found in any ancestor directory,
/// when either metadata file cannot be read or parsed, or when the
/// repository URL cannot be matched to a known hosting provider.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal deserialization targets for the two metadata files; only the
    // fields needed to build the permalink are declared.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to find the crate root (the directory holding
    // `.cargo_vcs_info.json`).
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Translate the registry-cache-relative path into a repository-relative
    // path via `path_in_vcs`.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7047
/// Serializes an optional git blame into a `BlameBufferResponse` protobuf
/// message; `None` maps to a response with an empty `blame_response`.
///
/// Note the field-name translation: the in-memory `committer_name` /
/// `committer_email` become `committer` / `committer_mail` on the wire
/// (mirrored by `deserialize_blame_buffer_response`).
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author,
            author_mail: entry.author_mail,
            author_time: entry.author_time,
            author_tz: entry.author_tz,
            committer: entry.committer_name,
            committer_mail: entry.committer_email,
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz,
            summary: entry.summary,
            previous: entry.previous,
            filename: entry.filename,
        })
        .collect::<Vec<_>>();

    // Commit messages are carried separately, keyed by commit oid.
    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
    }
}
7090
/// Deserializes a `BlameBufferResponse` protobuf message back into a git
/// blame; returns `None` when the response carries no `blame_response`.
///
/// Inverse of `serialize_blame_buffer_response`. Entries or messages whose
/// oid bytes fail to parse are silently dropped rather than failing the
/// whole response.
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame { entries, messages })
}
7126
/// Serializes a git branch into its protobuf representation.
///
/// `unix_timestamp` duplicates the most recent commit's timestamp at the top
/// level; upstream tracking counts are only included when the tracking
/// status is known (a gone upstream maps to `tracking: None`).
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        ref_name: branch.ref_name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string(),
            }),
    }
}
7156
/// Serializes a git worktree into its protobuf representation.
///
/// The proto has no optional `ref_name`, so a missing ref name is encoded as
/// the empty string; the path is converted lossily to a UTF-8 string.
fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
    proto::Worktree {
        path: worktree.path.to_string_lossy().to_string(),
        ref_name: worktree
            .ref_name
            .as_ref()
            .map(|s| s.to_string())
            .unwrap_or_default(),
        sha: worktree.sha.to_string(),
        is_main: worktree.is_main,
    }
}
7169
7170fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7171 git::repository::Worktree {
7172 path: PathBuf::from(proto.path.clone()),
7173 ref_name: Some(SharedString::from(&proto.ref_name)),
7174 sha: proto.sha.clone().into(),
7175 is_main: proto.is_main,
7176 }
7177}
7178
/// Deserializes a protobuf branch back into the in-memory representation.
///
/// Inverse of `branch_to_proto`: an upstream present with `tracking: None`
/// maps back to `UpstreamTracking::Gone`.
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                // NOTE(review): the proto carries no parent information, so
                // `has_parent` is unconditionally true here — confirm callers
                // tolerate this for root commits.
                has_parent: true,
            }
        }),
    }
}
7210
/// Serializes commit details into their protobuf representation
/// (field-for-field string conversion).
fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}
7220
/// Deserializes protobuf commit details back into the in-memory
/// representation; inverse of `commit_details_to_proto`.
fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}
7230
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        // A full scan supersedes any queued incremental refreshes.
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve HEAD to full commit details; a repo with no commits yields None.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // The main checkout is excluded: only *linked* worktrees are retained.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First foreground update: publish branch/head/remote/worktree state
    // early so the UI reacts before the slower status scan below completes.
    let snapshot = this.update(cx, |this, cx| {
        let branch_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if branch_changed {
            cx.emit(RepositoryEvent::BranchChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Second phase: full status scan, diff stats (only when a HEAD exists to
    // diff against), and stash entries, all on the background thread.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    // Build the status tree, collecting conflicted paths along the way for
    // the merge-details update below.
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Final foreground update: install statuses, merge details, and stash
    // entries, emitting change events as appropriate.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7393
/// Decodes a `FileStatus` from its protobuf representation.
///
/// When the structured `status` variant is absent, the legacy
/// `simple_status` integer code is decoded instead.
///
/// # Errors
/// Returns an error when either encoding carries a code that is invalid in
/// its position.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Legacy path: only a coarse status code is available; expand it into
    // the equivalent structured status.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both heads with the same mapping, then propagate any
            // error from either side.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Same pattern for the index/worktree pair of a tracked file.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7479
/// Encodes a `FileStatus` into the structured protobuf variant
/// (the inverse of the structured path of `status_from_proto`).
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}
7505
7506fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7507 match code {
7508 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7509 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7510 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7511 }
7512}
7513
7514fn tracked_status_to_proto(code: StatusCode) -> i32 {
7515 match code {
7516 StatusCode::Added => proto::GitStatus::Added as _,
7517 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7518 StatusCode::Modified => proto::GitStatus::Modified as _,
7519 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7520 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7521 StatusCode::Copied => proto::GitStatus::Copied as _,
7522 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7523 }
7524}