1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, CreateWorktreeTarget,
36 DiffType, FetchOptions, GitCommitTemplate, GitRepository, GitRepositoryCheckpoint,
37 GraphCommitData, InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote,
38 RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus,
39 Worktree as GitWorktree,
40 },
41 stash::{GitStash, StashEntry},
42 status::{
43 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
44 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
45 },
46};
47use gpui::{
48 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
49 WeakEntity,
50};
51use language::{
52 Buffer, BufferEvent, Language, LanguageRegistry,
53 proto::{deserialize_version, serialize_version},
54};
55use parking_lot::Mutex;
56use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
57use postage::stream::Stream as _;
58use rpc::{
59 AnyProtoClient, TypedEnvelope,
60 proto::{self, git_reset, split_repository_update},
61};
62use serde::Deserialize;
63use settings::WorktreeId;
64use smol::future::yield_now;
65use std::{
66 cmp::Ordering,
67 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
68 future::Future,
69 mem,
70 ops::Range,
71 path::{Path, PathBuf},
72 str::FromStr,
73 sync::{
74 Arc,
75 atomic::{self, AtomicU64},
76 },
77 time::Instant,
78};
79use sum_tree::{Edit, SumTree, TreeMap};
80use task::Shell;
81use text::{Bias, BufferId};
82use util::{
83 ResultExt, debug_panic,
84 paths::{PathStyle, SanitizedPath},
85 post_inc,
86 rel_path::RelPath,
87};
88use worktree::{
89 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
90 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
91};
92use zeroize::Zeroize;
93
/// Central store for git state in a project: the repositories discovered in
/// the project's worktrees, per-buffer diff/conflict state, and replication
/// of that state to and from collaborators.
pub struct GitStore {
    // Distinguishes a locally-backed store from one mirroring a remote host.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // Which worktrees are associated with each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    active_repo_id: Option<RepositoryId>,
    // In-flight diff loads keyed by buffer and diff kind; shared so concurrent
    // requests for the same diff await a single task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diff handles shared with each remote peer, kept alive on their behalf.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
108
/// Strong handles to diffs shared with a remote peer, keeping them alive for
/// the duration of the share.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}

/// Per-buffer git state: the diffs computed for the buffer, its conflict
/// markers, and the base texts those diffs compare against.
struct BufferGitState {
    // Weak handles: diffs are owned by their consumers and released when
    // nothing else references them.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against an arbitrary commit, keyed by OID (`None` when no commit
    // was specified, in which case the diff is created with no base text).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders notified when the next conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    // Cached base texts for diffs against specific commits.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
145
/// Describes which diff base texts changed for a buffer, and their new values
/// (`None` meaning the corresponding base no longer exists).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD text changed.
    SetHead(Option<String>),
    /// Index and HEAD both changed, to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD both changed, to the same value.
    SetBoth(Option<String>),
}

/// Identifies which kind of diff is being loaded for a buffer; used as part of
/// the `loading_diffs` deduplication key.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    // Buffer vs. the index (loaded via `load_staged_text`).
    Unstaged,
    // Buffer vs. HEAD (loaded via `load_committed_text`).
    Uncommitted,
    // Buffer vs. an arbitrary commit; `None` means no base commit.
    SinceOid(Option<git::Oid>),
}
163
/// Backing state of a `GitStore`: either it owns local repositories, or it
/// mirrors repositories from an upstream host over RPC.
enum GitStoreState {
    Local {
        /// Source of unique ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this remote project is itself shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A repository change to forward to downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Connection to downstream collaborators of a locally-owned project, plus
/// the background task that streams snapshot updates to them.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}

/// A checkpoint for every repository in the store, keyed by each repository's
/// working directory.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
194
/// The git status of a single path within a repository.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    /// Added/deleted line counts for the path, when available.
    pub diff_stat: Option<DiffStat>,
}
201
202impl StatusEntry {
203 fn to_proto(&self) -> proto::StatusEntry {
204 let simple_status = match self.status {
205 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
206 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
207 FileStatus::Tracked(TrackedStatus {
208 index_status,
209 worktree_status,
210 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
211 worktree_status
212 } else {
213 index_status
214 }),
215 };
216
217 proto::StatusEntry {
218 repo_path: self.repo_path.to_proto(),
219 simple_status,
220 status: Some(status_to_proto(self.status)),
221 diff_stat_added: self.diff_stat.map(|ds| ds.added),
222 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
223 }
224 }
225}
226
227impl TryFrom<proto::StatusEntry> for StatusEntry {
228 type Error = anyhow::Error;
229
230 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
231 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
232 let status = status_from_proto(value.simple_status, value.status)?;
233 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
234 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
235 _ => None,
236 };
237 Ok(Self {
238 repo_path,
239 status,
240 diff_stat,
241 })
242 }
243}
244
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    // Summaries aggregate the path ordering key and the status's summary.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    // Entries are keyed (and therefore ordered) by repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
263
/// Stable identifier for a repository within a `GitStore`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: the merge heads recorded for each
/// conflicted path, and the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Commit data for the git graph, which may still be loading.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}

/// An immutable snapshot of a repository's observable state. Cloned and
/// diffed against previous snapshots when replicating to collaborators.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    // NOTE(review): appears to identify the status scan that produced this
    // snapshot — confirm with the snapshot producer.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Linked git worktrees of this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
300
/// Identifier for a job running against a repository.
type JobId = u64;

/// Metadata about a running repository job.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Handle to the background task that serves commit-data requests for the
/// git graph.
struct GraphCommitDataHandler {
    _task: Task<()>,
    /// Channel for requesting data about a specific commit.
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph commit-data handler.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Incrementally loaded commit data for the git graph.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index of each commit within `commit_data`, keyed by OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of the currently loaded graph commits.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
332
/// A single git repository known to the `GitStore`, combining its latest
/// snapshot with the machinery for running git operations against it.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    // Queue of git jobs to execute against this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    job_id: JobId,
    // Askpass delegates for in-flight credential prompts, keyed by request id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    // Shared, lazily resolved backend (local repo handle or remote client).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}

impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    // Expose the snapshot's fields directly on `Repository`.
    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
360
/// Backend state for operating on a repository via the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    /// Shell environment resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
367
impl LocalRepositoryState {
    /// Opens the git repository at `dot_git_abs_path`, first resolving the
    /// shell environment for its working directory so git subprocesses see
    /// the user's `PATH` and related variables.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                // Fall back to an empty environment rather than failing to
                // open the repository.
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` binary found via the resolved PATH;
                    // fall back to the process's own PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
410
/// Backend state for operating on a repository hosted by a remote peer.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// How git operations are executed for a repository: directly against a local
/// checkout, or forwarded over RPC to the host.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events for loading the git graph.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of loaded commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}

/// Events emitted by a `Repository` when parts of its snapshot change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Marker event emitted by `Repository` for job-activity changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the `GitStore`.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of work to run against a repository's backend.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): key appears to identify equivalent queued jobs for
    // coalescing — confirm with the job-queue consumer.
    key: Option<GitJobKey>,
}

/// Classes of git jobs that carry a deduplication key.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
472
473impl GitStore {
474 pub fn local(
475 worktree_store: &Entity<WorktreeStore>,
476 buffer_store: Entity<BufferStore>,
477 environment: Entity<ProjectEnvironment>,
478 fs: Arc<dyn Fs>,
479 cx: &mut Context<Self>,
480 ) -> Self {
481 Self::new(
482 worktree_store.clone(),
483 buffer_store,
484 GitStoreState::Local {
485 next_repository_id: Arc::new(AtomicU64::new(1)),
486 downstream: None,
487 project_environment: environment,
488 fs,
489 },
490 cx,
491 )
492 }
493
494 pub fn remote(
495 worktree_store: &Entity<WorktreeStore>,
496 buffer_store: Entity<BufferStore>,
497 upstream_client: AnyProtoClient,
498 project_id: u64,
499 cx: &mut Context<Self>,
500 ) -> Self {
501 Self::new(
502 worktree_store.clone(),
503 buffer_store,
504 GitStoreState::Remote {
505 upstream_client,
506 upstream_project_id: project_id,
507 downstream: None,
508 },
509 cx,
510 )
511 }
512
513 fn new(
514 worktree_store: Entity<WorktreeStore>,
515 buffer_store: Entity<BufferStore>,
516 state: GitStoreState,
517 cx: &mut Context<Self>,
518 ) -> Self {
519 let mut _subscriptions = vec![
520 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
521 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
522 ];
523
524 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
525 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
526 }
527
528 GitStore {
529 state,
530 buffer_store,
531 worktree_store,
532 repositories: HashMap::default(),
533 worktree_ids: HashMap::default(),
534 active_repo_id: None,
535 _subscriptions,
536 loading_diffs: HashMap::default(),
537 shared_diffs: HashMap::default(),
538 diffs: HashMap::default(),
539 }
540 }
541
    /// Registers all git RPC message handlers on the given client so that
    /// collaborators' requests are routed to the appropriate `GitStore`.
    pub fn init(client: &AnyProtoClient) {
        // Remotes and branches.
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        // Sync with remotes.
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        // Staging, stashing, and committing.
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        // Checkpoints.
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_create_archive_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_archive_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        // Diffs, blame, and history.
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        // Repository lifecycle and git worktrees.
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
        client.add_entity_request_handler(Self::handle_edit_ref);
        client.add_entity_request_handler(Self::handle_repair_worktrees);
    }
598
599 pub fn is_local(&self) -> bool {
600 matches!(self.state, GitStoreState::Local { .. })
601 }
602
603 fn set_active_repo_id(&mut self, repo_id: RepositoryId, cx: &mut Context<Self>) {
604 if self.active_repo_id != Some(repo_id) {
605 self.active_repo_id = Some(repo_id);
606 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
607 }
608 }
609
610 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
611 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
612 self.set_active_repo_id(repo.read(cx).id, cx);
613 }
614 }
615
616 pub fn set_active_repo_for_worktree(
617 &mut self,
618 worktree_id: WorktreeId,
619 cx: &mut Context<Self>,
620 ) {
621 let Some(worktree) = self
622 .worktree_store
623 .read(cx)
624 .worktree_for_id(worktree_id, cx)
625 else {
626 return;
627 };
628 let worktree_abs_path = worktree.read(cx).abs_path();
629 let Some(repo_id) = self
630 .repositories
631 .values()
632 .filter(|repo| {
633 let repo_path = &repo.read(cx).work_directory_abs_path;
634 *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref())
635 })
636 .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
637 .map(|repo| repo.read(cx).id)
638 else {
639 return;
640 };
641
642 self.set_active_repo_id(repo_id, cx);
643 }
644
    /// Begins sharing this store's repositories with a downstream client.
    ///
    /// For a remote store, current snapshots are forwarded immediately and the
    /// client is recorded for future updates. For a local store, a background
    /// task is spawned that diffs successive snapshots and streams incremental
    /// updates to the client.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    // Updates are split into multiple messages before sending.
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the channel with the current state of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send only the
                                            // delta since the last sent snapshot.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send a full snapshot.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The update stream ended (sender dropped or a send
                        // failed); clear the downstream connection.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
725
726 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
727 match &mut self.state {
728 GitStoreState::Local {
729 downstream: downstream_client,
730 ..
731 } => {
732 downstream_client.take();
733 }
734 GitStoreState::Remote {
735 downstream: downstream_client,
736 ..
737 } => {
738 downstream_client.take();
739 }
740 }
741 self.shared_diffs.clear();
742 }
743
    /// Drops the record of diffs shared with the given peer, releasing the
    /// strong handles held on their behalf.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
747
748 pub fn active_repository(&self) -> Option<Entity<Repository>> {
749 self.active_repo_id
750 .as_ref()
751 .map(|id| self.repositories[id].clone())
752 }
753
    /// Returns the unstaged diff for the buffer, loading it if necessary.
    /// Concurrent calls for the same buffer share a single loading task.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: a live diff already exists — just wait out any
        // in-progress recalculation before handing it back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads through the shared `loading_diffs` map.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
808
    /// Returns a diff of the buffer against the given commit, creating it if
    /// necessary. `None` produces a diff with no base content. The unstaged
    /// diff is attached as the secondary diff of the result.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: reuse an existing live diff for this oid.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // If another caller is already loading this diff, await the same task.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text for the diff from the repository.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Chain the unstaged diff as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Record the finished diff and its base text in the
                    // buffer's git state, and clear the in-flight entry.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
909
    /// Returns the uncommitted diff for the buffer, loading it if necessary.
    /// Concurrent calls for the same buffer share a single loading task.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff already exists — just wait out any
        // in-progress recalculation before handing it back.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads through the shared `loading_diffs` map.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
962
    /// Shared tail of `open_unstaged_diff`/`open_uncommitted_diff`: records
    /// the new diff in the buffer's git state, wires the unstaged diff in as
    /// the uncommitted diff's secondary, and waits for the first
    /// recalculation before returning the diff.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Base-text loading failed: clear the in-flight entry so a
                // later call can retry, then propagate the error.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => {
                        diff_state.unstaged_diff.get_or_insert(diff.downgrade());
                    }
                    DiffKind::Uncommitted => {
                        // An uncommitted diff needs an unstaged diff as its
                        // secondary; create an empty one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off the first recalculation, then wait for it to land
                // before resolving the returned future.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1039
1040 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1041 let diff_state = self.diffs.get(&buffer_id)?;
1042 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1043 }
1044
1045 pub fn get_uncommitted_diff(
1046 &self,
1047 buffer_id: BufferId,
1048 cx: &App,
1049 ) -> Option<Entity<BufferDiff>> {
1050 let diff_state = self.diffs.get(&buffer_id)?;
1051 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1052 }
1053
1054 pub fn get_diff_since_oid(
1055 &self,
1056 buffer_id: BufferId,
1057 oid: Option<git::Oid>,
1058 cx: &App,
1059 ) -> Option<Entity<BufferDiff>> {
1060 let diff_state = self.diffs.get(&buffer_id)?;
1061 diff_state.read(cx).oid_diff(oid)
1062 }
1063
1064 pub fn open_conflict_set(
1065 &mut self,
1066 buffer: Entity<Buffer>,
1067 cx: &mut Context<Self>,
1068 ) -> Entity<ConflictSet> {
1069 log::debug!("open conflict set");
1070 let buffer_id = buffer.read(cx).remote_id();
1071
1072 if let Some(git_state) = self.diffs.get(&buffer_id)
1073 && let Some(conflict_set) = git_state
1074 .read(cx)
1075 .conflict_set
1076 .as_ref()
1077 .and_then(|weak| weak.upgrade())
1078 {
1079 let conflict_set = conflict_set;
1080 let buffer_snapshot = buffer.read(cx).text_snapshot();
1081
1082 git_state.update(cx, |state, cx| {
1083 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1084 });
1085
1086 return conflict_set;
1087 }
1088
1089 let is_unmerged = self
1090 .repository_and_path_for_buffer_id(buffer_id, cx)
1091 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1092 let git_store = cx.weak_entity();
1093 let buffer_git_state = self
1094 .diffs
1095 .entry(buffer_id)
1096 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1097 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1098
1099 self._subscriptions
1100 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1101 cx.emit(GitStoreEvent::ConflictsUpdated);
1102 }));
1103
1104 buffer_git_state.update(cx, |state, cx| {
1105 state.conflict_set = Some(conflict_set.downgrade());
1106 let buffer_snapshot = buffer.read(cx).text_snapshot();
1107 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1108 });
1109
1110 conflict_set
1111 }
1112
1113 pub fn project_path_git_status(
1114 &self,
1115 project_path: &ProjectPath,
1116 cx: &App,
1117 ) -> Option<FileStatus> {
1118 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1119 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1120 }
1121
    /// Captures a checkpoint of every repository in the store, keyed by work
    /// directory, so the whole project's git state can later be restored or
    /// compared.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `?` inside the map closure flattens the nested Result
                // produced by the per-repository checkpoint future.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            // Fail the whole checkpoint if any single repository fails.
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1142
1143 pub fn restore_checkpoint(
1144 &self,
1145 checkpoint: GitStoreCheckpoint,
1146 cx: &mut App,
1147 ) -> Task<Result<()>> {
1148 let repositories_by_work_dir_abs_path = self
1149 .repositories
1150 .values()
1151 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1152 .collect::<HashMap<_, _>>();
1153
1154 let mut tasks = Vec::new();
1155 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1156 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1157 let restore = repository.update(cx, |repository, _| {
1158 repository.restore_checkpoint(checkpoint)
1159 });
1160 tasks.push(async move { restore.await? });
1161 }
1162 }
1163 cx.background_spawn(async move {
1164 future::try_join_all(tasks).await?;
1165 Ok(())
1166 })
1167 }
1168
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Every repository checkpoint in `left` must have a matching, equal
    /// checkpoint in `right`.
    ///
    /// NOTE(review): entries present only in `right` are silently ignored, so
    /// the comparison is asymmetric — confirm whether that's intended.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    tasks.push(async move { compare.await? });
                }
            } else {
                // `left` references a repository that `right` doesn't have:
                // the checkpoints cannot be equal.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            // Equal only if every per-repository comparison reports equality.
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1207
    /// Blames a buffer.
    ///
    /// When `version` is provided, the blame is computed against the buffer's
    /// content at that version; otherwise its current content is used. Local
    /// repositories run the blame via their backend; remote ones forward the
    /// request to the project host.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot everything we need from the buffer before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold the repository weakly so the blame doesn't keep it alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1254
1255 pub fn file_history(
1256 &self,
1257 repo: &Entity<Repository>,
1258 path: RepoPath,
1259 cx: &mut App,
1260 ) -> Task<Result<git::repository::FileHistory>> {
1261 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1262
1263 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1264 }
1265
1266 pub fn file_history_paginated(
1267 &self,
1268 repo: &Entity<Repository>,
1269 path: RepoPath,
1270 skip: usize,
1271 limit: Option<usize>,
1272 cx: &mut App,
1273 ) -> Task<Result<git::repository::FileHistory>> {
1274 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1275
1276 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1277 }
1278
    /// Builds a permalink URL to the given line range of a buffer on its git
    /// hosting provider, based on the repository's remote URL and HEAD SHA.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Use the current branch's upstream remote when it has one; otherwise
        // fall back to the conventional "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        // Recognize the hosting provider (GitHub, GitLab, ...)
                        // from the remote URL to know the permalink format.
                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Delegate to the project host, which has the local
                        // repository state.
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1363
1364 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1365 match &self.state {
1366 GitStoreState::Local {
1367 downstream: downstream_client,
1368 ..
1369 } => downstream_client
1370 .as_ref()
1371 .map(|state| (state.client.clone(), state.project_id)),
1372 GitStoreState::Remote {
1373 downstream: downstream_client,
1374 ..
1375 } => downstream_client.clone(),
1376 }
1377 }
1378
1379 fn upstream_client(&self) -> Option<AnyProtoClient> {
1380 match &self.state {
1381 GitStoreState::Local { .. } => None,
1382 GitStoreState::Remote {
1383 upstream_client, ..
1384 } => Some(upstream_client.clone()),
1385 }
1386 }
1387
    /// Reacts to worktree changes on a local store: schedules git updates for
    /// changed paths, registers/unregisters repositories, and prunes
    /// repositories whose last worktree was removed.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        // Only local stores observe worktrees directly; remote stores receive
        // repository updates over the wire instead.
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by containing repository, then
                    // notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Detach the removed worktree from every repository, and
                // collect the repositories left with no worktree at all.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream collaborators to drop it too.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was pruned, fall back to any
                // remaining repository (or to none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// When a repository's state changes, refreshes conflict tracking for any
    /// open buffers in that repository, then re-emits the event tagged with
    /// whether it came from the active repository.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only consider buffers whose file lives in the repository that
            // emitted this event.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Re-scan for conflict markers only when the unmerged
                        // status actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1533
    /// Forwards a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1537
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update against an existing repository via its old or
            // new work directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repository still exists (possibly moved): associate
                    // it with this worktree and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // The repository is gone from this worktree; drop the
                    // association, and remove the repository entirely once no
                    // other worktree references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A brand-new repository: allocate an id and create a local
                // Repository entity for it.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Only trust the backend with e.g. hooks when the containing
                // worktree is trusted.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository we see becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1650
1651 fn on_trusted_worktrees_event(
1652 &mut self,
1653 _: Entity<TrustedWorktreesStore>,
1654 event: &TrustedWorktreesEvent,
1655 cx: &mut Context<Self>,
1656 ) {
1657 if !matches!(self.state, GitStoreState::Local { .. }) {
1658 return;
1659 }
1660
1661 let (is_trusted, event_paths) = match event {
1662 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1663 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1664 };
1665
1666 for (repo_id, worktree_ids) in &self.worktree_ids {
1667 if worktree_ids
1668 .iter()
1669 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1670 {
1671 if let Some(repo) = self.repositories.get(repo_id) {
1672 let repository_state = repo.read(cx).repository_state.clone();
1673 cx.background_spawn(async move {
1674 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1675 state.backend.set_trusted(is_trusted);
1676 }
1677 })
1678 .detach();
1679 }
1680 }
1681 }
1682 }
1683
    /// Keeps per-buffer git state in sync with buffer lifecycle events:
    /// language changes, buffer drops, shared-buffer closes, and file renames.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Track language changes so diff state can update accordingly.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop all diff state for the buffer, including diffs shared
                // with collaborators.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Best-effort: failures to reload the diff bases are
                        // logged rather than surfaced.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1756
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all the resulting
    /// work has completed. Buffers without tracked diff state are skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // Only wait when a recalculation was actually started.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1782
    /// When hunks are staged or unstaged through a diff, writes the new index
    /// text for the file; on failure the pending hunks are rolled back and an
    /// `IndexWriteError` event is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Count this staging operation so stale asynchronous updates
                // can be recognized later.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            // The index write failed: clear the optimistic
                            // pending-hunk state and report the error.
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1822
1823 fn local_worktree_git_repos_changed(
1824 &mut self,
1825 worktree: Entity<Worktree>,
1826 changed_repos: &UpdatedGitRepositoriesSet,
1827 cx: &mut Context<Self>,
1828 ) {
1829 log::debug!("local worktree repos changed");
1830 debug_assert!(worktree.read(cx).is_local());
1831
1832 for repository in self.repositories.values() {
1833 repository.update(cx, |repository, cx| {
1834 let repo_abs_path = &repository.work_directory_abs_path;
1835 if changed_repos.iter().any(|update| {
1836 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1837 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1838 }) {
1839 repository.reload_buffer_diff_bases(cx);
1840 }
1841 });
1842 }
1843 }
1844
    /// All repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1848
1849 /// Returns the original (main) repository working directory for the given worktree.
1850 /// For normal checkouts this equals the worktree's own path; for linked
1851 /// worktrees it points back to the original repo.
1852 pub fn original_repo_path_for_worktree(
1853 &self,
1854 worktree_id: WorktreeId,
1855 cx: &App,
1856 ) -> Option<Arc<Path>> {
1857 self.active_repo_id
1858 .iter()
1859 .chain(self.worktree_ids.keys())
1860 .find(|repo_id| {
1861 self.worktree_ids
1862 .get(repo_id)
1863 .is_some_and(|ids| ids.contains(&worktree_id))
1864 })
1865 .and_then(|repo_id| self.repositories.get(repo_id))
1866 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1867 }
1868
1869 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1870 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1871 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1872 Some(status.status)
1873 }
1874
1875 pub fn repository_and_path_for_buffer_id(
1876 &self,
1877 buffer_id: BufferId,
1878 cx: &App,
1879 ) -> Option<(Entity<Repository>, RepoPath)> {
1880 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1881 let project_path = buffer.read(cx).project_path(cx)?;
1882 self.repository_and_path_for_project_path(&project_path, cx)
1883 }
1884
1885 pub fn repository_and_path_for_project_path(
1886 &self,
1887 path: &ProjectPath,
1888 cx: &App,
1889 ) -> Option<(Entity<Repository>, RepoPath)> {
1890 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1891 self.repositories
1892 .values()
1893 .filter_map(|repo| {
1894 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1895 Some((repo.clone(), repo_path))
1896 })
1897 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1898 }
1899
1900 pub fn git_init(
1901 &self,
1902 path: Arc<Path>,
1903 fallback_branch_name: String,
1904 cx: &App,
1905 ) -> Task<Result<()>> {
1906 match &self.state {
1907 GitStoreState::Local { fs, .. } => {
1908 let fs = fs.clone();
1909 cx.background_executor()
1910 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1911 }
1912 GitStoreState::Remote {
1913 upstream_client,
1914 upstream_project_id: project_id,
1915 ..
1916 } => {
1917 let client = upstream_client.clone();
1918 let project_id = *project_id;
1919 cx.background_executor().spawn(async move {
1920 client
1921 .request(proto::GitInit {
1922 project_id: project_id,
1923 abs_path: path.to_string_lossy().into_owned(),
1924 fallback_branch_name,
1925 })
1926 .await?;
1927 Ok(())
1928 })
1929 }
1930 }
1931 }
1932
1933 pub fn git_clone(
1934 &self,
1935 repo: String,
1936 path: impl Into<Arc<std::path::Path>>,
1937 cx: &App,
1938 ) -> Task<Result<()>> {
1939 let path = path.into();
1940 match &self.state {
1941 GitStoreState::Local { fs, .. } => {
1942 let fs = fs.clone();
1943 cx.background_executor()
1944 .spawn(async move { fs.git_clone(&repo, &path).await })
1945 }
1946 GitStoreState::Remote {
1947 upstream_client,
1948 upstream_project_id,
1949 ..
1950 } => {
1951 if upstream_client.is_via_collab() {
1952 return Task::ready(Err(anyhow!(
1953 "Git Clone isn't supported for project guests"
1954 )));
1955 }
1956 let request = upstream_client.request(proto::GitClone {
1957 project_id: *upstream_project_id,
1958 abs_path: path.to_string_lossy().into_owned(),
1959 remote_repo: repo,
1960 });
1961
1962 cx.background_spawn(async move {
1963 let result = request.await?;
1964
1965 match result.success {
1966 true => Ok(()),
1967 false => Err(anyhow!("Git Clone failed")),
1968 }
1969 })
1970 }
1971 }
1972 }
1973
    /// Message handler: applies a repository snapshot update sent by the
    /// upstream host, creating the remote repository entity the first time it
    /// is seen, and re-forwards the update to any downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Create the repository lazily on first sight.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            // Pushed outside the closure, which can't touch `_subscriptions`
            // while `repositories` is borrowed.
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository we hear about becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward to our own downstream collaborators, rewriting the
            // project id to ours.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
2029
2030 async fn handle_remove_repository(
2031 this: Entity<Self>,
2032 envelope: TypedEnvelope<proto::RemoveRepository>,
2033 mut cx: AsyncApp,
2034 ) -> Result<()> {
2035 this.update(&mut cx, |this, cx| {
2036 let mut update = envelope.payload;
2037 let id = RepositoryId::from_proto(update.id);
2038 this.repositories.remove(&id);
2039 if let Some((client, project_id)) = this.downstream_client() {
2040 update.project_id = project_id.to_proto();
2041 client.send(update).log_err();
2042 }
2043 if this.active_repo_id == Some(id) {
2044 this.active_repo_id = None;
2045 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
2046 }
2047 cx.emit(GitStoreEvent::RepositoryRemoved(id));
2048 });
2049 Ok(())
2050 }
2051
2052 async fn handle_git_init(
2053 this: Entity<Self>,
2054 envelope: TypedEnvelope<proto::GitInit>,
2055 cx: AsyncApp,
2056 ) -> Result<proto::Ack> {
2057 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2058 let name = envelope.payload.fallback_branch_name;
2059 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2060 .await?;
2061
2062 Ok(proto::Ack {})
2063 }
2064
2065 async fn handle_git_clone(
2066 this: Entity<Self>,
2067 envelope: TypedEnvelope<proto::GitClone>,
2068 cx: AsyncApp,
2069 ) -> Result<proto::GitCloneResponse> {
2070 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2071 let repo_name = envelope.payload.remote_repo;
2072 let result = cx
2073 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2074 .await;
2075
2076 Ok(proto::GitCloneResponse {
2077 success: result.is_ok(),
2078 })
2079 }
2080
2081 async fn handle_fetch(
2082 this: Entity<Self>,
2083 envelope: TypedEnvelope<proto::Fetch>,
2084 mut cx: AsyncApp,
2085 ) -> Result<proto::RemoteMessageResponse> {
2086 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2087 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2088 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2089 let askpass_id = envelope.payload.askpass_id;
2090
2091 let askpass = make_remote_delegate(
2092 this,
2093 envelope.payload.project_id,
2094 repository_id,
2095 askpass_id,
2096 &mut cx,
2097 );
2098
2099 let remote_output = repository_handle
2100 .update(&mut cx, |repository_handle, cx| {
2101 repository_handle.fetch(fetch_options, askpass, cx)
2102 })
2103 .await??;
2104
2105 Ok(proto::RemoteMessageResponse {
2106 stdout: remote_output.stdout,
2107 stderr: remote_output.stderr,
2108 })
2109 }
2110
2111 async fn handle_push(
2112 this: Entity<Self>,
2113 envelope: TypedEnvelope<proto::Push>,
2114 mut cx: AsyncApp,
2115 ) -> Result<proto::RemoteMessageResponse> {
2116 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2117 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2118
2119 let askpass_id = envelope.payload.askpass_id;
2120 let askpass = make_remote_delegate(
2121 this,
2122 envelope.payload.project_id,
2123 repository_id,
2124 askpass_id,
2125 &mut cx,
2126 );
2127
2128 let options = envelope
2129 .payload
2130 .options
2131 .as_ref()
2132 .map(|_| match envelope.payload.options() {
2133 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
2134 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
2135 });
2136
2137 let branch_name = envelope.payload.branch_name.into();
2138 let remote_branch_name = envelope.payload.remote_branch_name.into();
2139 let remote_name = envelope.payload.remote_name.into();
2140
2141 let remote_output = repository_handle
2142 .update(&mut cx, |repository_handle, cx| {
2143 repository_handle.push(
2144 branch_name,
2145 remote_branch_name,
2146 remote_name,
2147 options,
2148 askpass,
2149 cx,
2150 )
2151 })
2152 .await??;
2153 Ok(proto::RemoteMessageResponse {
2154 stdout: remote_output.stdout,
2155 stderr: remote_output.stderr,
2156 })
2157 }
2158
2159 async fn handle_pull(
2160 this: Entity<Self>,
2161 envelope: TypedEnvelope<proto::Pull>,
2162 mut cx: AsyncApp,
2163 ) -> Result<proto::RemoteMessageResponse> {
2164 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2165 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2166 let askpass_id = envelope.payload.askpass_id;
2167 let askpass = make_remote_delegate(
2168 this,
2169 envelope.payload.project_id,
2170 repository_id,
2171 askpass_id,
2172 &mut cx,
2173 );
2174
2175 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2176 let remote_name = envelope.payload.remote_name.into();
2177 let rebase = envelope.payload.rebase;
2178
2179 let remote_message = repository_handle
2180 .update(&mut cx, |repository_handle, cx| {
2181 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2182 })
2183 .await??;
2184
2185 Ok(proto::RemoteMessageResponse {
2186 stdout: remote_message.stdout,
2187 stderr: remote_message.stderr,
2188 })
2189 }
2190
2191 async fn handle_stage(
2192 this: Entity<Self>,
2193 envelope: TypedEnvelope<proto::Stage>,
2194 mut cx: AsyncApp,
2195 ) -> Result<proto::Ack> {
2196 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2197 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2198
2199 let entries = envelope
2200 .payload
2201 .paths
2202 .into_iter()
2203 .map(|path| RepoPath::new(&path))
2204 .collect::<Result<Vec<_>>>()?;
2205
2206 repository_handle
2207 .update(&mut cx, |repository_handle, cx| {
2208 repository_handle.stage_entries(entries, cx)
2209 })
2210 .await?;
2211 Ok(proto::Ack {})
2212 }
2213
2214 async fn handle_unstage(
2215 this: Entity<Self>,
2216 envelope: TypedEnvelope<proto::Unstage>,
2217 mut cx: AsyncApp,
2218 ) -> Result<proto::Ack> {
2219 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2220 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2221
2222 let entries = envelope
2223 .payload
2224 .paths
2225 .into_iter()
2226 .map(|path| RepoPath::new(&path))
2227 .collect::<Result<Vec<_>>>()?;
2228
2229 repository_handle
2230 .update(&mut cx, |repository_handle, cx| {
2231 repository_handle.unstage_entries(entries, cx)
2232 })
2233 .await?;
2234
2235 Ok(proto::Ack {})
2236 }
2237
2238 async fn handle_stash(
2239 this: Entity<Self>,
2240 envelope: TypedEnvelope<proto::Stash>,
2241 mut cx: AsyncApp,
2242 ) -> Result<proto::Ack> {
2243 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2244 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2245
2246 let entries = envelope
2247 .payload
2248 .paths
2249 .into_iter()
2250 .map(|path| RepoPath::new(&path))
2251 .collect::<Result<Vec<_>>>()?;
2252
2253 repository_handle
2254 .update(&mut cx, |repository_handle, cx| {
2255 repository_handle.stash_entries(entries, cx)
2256 })
2257 .await?;
2258
2259 Ok(proto::Ack {})
2260 }
2261
2262 async fn handle_stash_pop(
2263 this: Entity<Self>,
2264 envelope: TypedEnvelope<proto::StashPop>,
2265 mut cx: AsyncApp,
2266 ) -> Result<proto::Ack> {
2267 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2268 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2269 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2270
2271 repository_handle
2272 .update(&mut cx, |repository_handle, cx| {
2273 repository_handle.stash_pop(stash_index, cx)
2274 })
2275 .await?;
2276
2277 Ok(proto::Ack {})
2278 }
2279
2280 async fn handle_stash_apply(
2281 this: Entity<Self>,
2282 envelope: TypedEnvelope<proto::StashApply>,
2283 mut cx: AsyncApp,
2284 ) -> Result<proto::Ack> {
2285 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2286 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2287 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2288
2289 repository_handle
2290 .update(&mut cx, |repository_handle, cx| {
2291 repository_handle.stash_apply(stash_index, cx)
2292 })
2293 .await?;
2294
2295 Ok(proto::Ack {})
2296 }
2297
2298 async fn handle_stash_drop(
2299 this: Entity<Self>,
2300 envelope: TypedEnvelope<proto::StashDrop>,
2301 mut cx: AsyncApp,
2302 ) -> Result<proto::Ack> {
2303 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2304 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2305 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2306
2307 repository_handle
2308 .update(&mut cx, |repository_handle, cx| {
2309 repository_handle.stash_drop(stash_index, cx)
2310 })
2311 .await??;
2312
2313 Ok(proto::Ack {})
2314 }
2315
2316 async fn handle_set_index_text(
2317 this: Entity<Self>,
2318 envelope: TypedEnvelope<proto::SetIndexText>,
2319 mut cx: AsyncApp,
2320 ) -> Result<proto::Ack> {
2321 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2322 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2323 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2324
2325 repository_handle
2326 .update(&mut cx, |repository_handle, cx| {
2327 repository_handle.spawn_set_index_text_job(
2328 repo_path,
2329 envelope.payload.text,
2330 None,
2331 cx,
2332 )
2333 })
2334 .await??;
2335 Ok(proto::Ack {})
2336 }
2337
2338 async fn handle_run_hook(
2339 this: Entity<Self>,
2340 envelope: TypedEnvelope<proto::RunGitHook>,
2341 mut cx: AsyncApp,
2342 ) -> Result<proto::Ack> {
2343 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2344 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2345 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2346 repository_handle
2347 .update(&mut cx, |repository_handle, cx| {
2348 repository_handle.run_hook(hook, cx)
2349 })
2350 .await??;
2351 Ok(proto::Ack {})
2352 }
2353
2354 async fn handle_commit(
2355 this: Entity<Self>,
2356 envelope: TypedEnvelope<proto::Commit>,
2357 mut cx: AsyncApp,
2358 ) -> Result<proto::Ack> {
2359 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2360 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2361 let askpass_id = envelope.payload.askpass_id;
2362
2363 let askpass = make_remote_delegate(
2364 this,
2365 envelope.payload.project_id,
2366 repository_id,
2367 askpass_id,
2368 &mut cx,
2369 );
2370
2371 let message = SharedString::from(envelope.payload.message);
2372 let name = envelope.payload.name.map(SharedString::from);
2373 let email = envelope.payload.email.map(SharedString::from);
2374 let options = envelope.payload.options.unwrap_or_default();
2375
2376 repository_handle
2377 .update(&mut cx, |repository_handle, cx| {
2378 repository_handle.commit(
2379 message,
2380 name.zip(email),
2381 CommitOptions {
2382 amend: options.amend,
2383 signoff: options.signoff,
2384 allow_empty: options.allow_empty,
2385 },
2386 askpass,
2387 cx,
2388 )
2389 })
2390 .await??;
2391 Ok(proto::Ack {})
2392 }
2393
2394 async fn handle_get_remotes(
2395 this: Entity<Self>,
2396 envelope: TypedEnvelope<proto::GetRemotes>,
2397 mut cx: AsyncApp,
2398 ) -> Result<proto::GetRemotesResponse> {
2399 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2400 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2401
2402 let branch_name = envelope.payload.branch_name;
2403 let is_push = envelope.payload.is_push;
2404
2405 let remotes = repository_handle
2406 .update(&mut cx, |repository_handle, _| {
2407 repository_handle.get_remotes(branch_name, is_push)
2408 })
2409 .await??;
2410
2411 Ok(proto::GetRemotesResponse {
2412 remotes: remotes
2413 .into_iter()
2414 .map(|remotes| proto::get_remotes_response::Remote {
2415 name: remotes.name.to_string(),
2416 })
2417 .collect::<Vec<_>>(),
2418 })
2419 }
2420
2421 async fn handle_get_worktrees(
2422 this: Entity<Self>,
2423 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2424 mut cx: AsyncApp,
2425 ) -> Result<proto::GitWorktreesResponse> {
2426 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2427 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2428
2429 let worktrees = repository_handle
2430 .update(&mut cx, |repository_handle, _| {
2431 repository_handle.worktrees()
2432 })
2433 .await??;
2434
2435 Ok(proto::GitWorktreesResponse {
2436 worktrees: worktrees
2437 .into_iter()
2438 .map(|worktree| worktree_to_proto(&worktree))
2439 .collect::<Vec<_>>(),
2440 })
2441 }
2442
2443 async fn handle_create_worktree(
2444 this: Entity<Self>,
2445 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2446 mut cx: AsyncApp,
2447 ) -> Result<proto::Ack> {
2448 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2449 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2450 let directory = PathBuf::from(envelope.payload.directory);
2451 let name = envelope.payload.name;
2452 let commit = envelope.payload.commit;
2453 let use_existing_branch = envelope.payload.use_existing_branch;
2454 let target = if name.is_empty() {
2455 CreateWorktreeTarget::Detached { base_sha: commit }
2456 } else if use_existing_branch {
2457 CreateWorktreeTarget::ExistingBranch { branch_name: name }
2458 } else {
2459 CreateWorktreeTarget::NewBranch {
2460 branch_name: name,
2461 base_sha: commit,
2462 }
2463 };
2464
2465 repository_handle
2466 .update(&mut cx, |repository_handle, _| {
2467 repository_handle.create_worktree(target, directory)
2468 })
2469 .await??;
2470
2471 Ok(proto::Ack {})
2472 }
2473
2474 async fn handle_remove_worktree(
2475 this: Entity<Self>,
2476 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2477 mut cx: AsyncApp,
2478 ) -> Result<proto::Ack> {
2479 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2480 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2481 let path = PathBuf::from(envelope.payload.path);
2482 let force = envelope.payload.force;
2483
2484 repository_handle
2485 .update(&mut cx, |repository_handle, _| {
2486 repository_handle.remove_worktree(path, force)
2487 })
2488 .await??;
2489
2490 Ok(proto::Ack {})
2491 }
2492
2493 async fn handle_rename_worktree(
2494 this: Entity<Self>,
2495 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2496 mut cx: AsyncApp,
2497 ) -> Result<proto::Ack> {
2498 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2499 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2500 let old_path = PathBuf::from(envelope.payload.old_path);
2501 let new_path = PathBuf::from(envelope.payload.new_path);
2502
2503 repository_handle
2504 .update(&mut cx, |repository_handle, _| {
2505 repository_handle.rename_worktree(old_path, new_path)
2506 })
2507 .await??;
2508
2509 Ok(proto::Ack {})
2510 }
2511
2512 async fn handle_get_head_sha(
2513 this: Entity<Self>,
2514 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2515 mut cx: AsyncApp,
2516 ) -> Result<proto::GitGetHeadShaResponse> {
2517 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2518 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2519
2520 let head_sha = repository_handle
2521 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2522 .await??;
2523
2524 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2525 }
2526
2527 async fn handle_edit_ref(
2528 this: Entity<Self>,
2529 envelope: TypedEnvelope<proto::GitEditRef>,
2530 mut cx: AsyncApp,
2531 ) -> Result<proto::Ack> {
2532 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2533 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2534 let ref_name = envelope.payload.ref_name;
2535 let commit = match envelope.payload.action {
2536 Some(proto::git_edit_ref::Action::UpdateToCommit(sha)) => Some(sha),
2537 Some(proto::git_edit_ref::Action::Delete(_)) => None,
2538 None => anyhow::bail!("GitEditRef missing action"),
2539 };
2540
2541 repository_handle
2542 .update(&mut cx, |repository_handle, _| {
2543 repository_handle.edit_ref(ref_name, commit)
2544 })
2545 .await??;
2546
2547 Ok(proto::Ack {})
2548 }
2549
2550 async fn handle_repair_worktrees(
2551 this: Entity<Self>,
2552 envelope: TypedEnvelope<proto::GitRepairWorktrees>,
2553 mut cx: AsyncApp,
2554 ) -> Result<proto::Ack> {
2555 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2556 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2557
2558 repository_handle
2559 .update(&mut cx, |repository_handle, _| {
2560 repository_handle.repair_worktrees()
2561 })
2562 .await??;
2563
2564 Ok(proto::Ack {})
2565 }
2566
2567 async fn handle_get_branches(
2568 this: Entity<Self>,
2569 envelope: TypedEnvelope<proto::GitGetBranches>,
2570 mut cx: AsyncApp,
2571 ) -> Result<proto::GitBranchesResponse> {
2572 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2573 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2574
2575 let branches = repository_handle
2576 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2577 .await??;
2578
2579 Ok(proto::GitBranchesResponse {
2580 branches: branches
2581 .into_iter()
2582 .map(|branch| branch_to_proto(&branch))
2583 .collect::<Vec<_>>(),
2584 })
2585 }
2586 async fn handle_get_default_branch(
2587 this: Entity<Self>,
2588 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2589 mut cx: AsyncApp,
2590 ) -> Result<proto::GetDefaultBranchResponse> {
2591 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2592 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2593
2594 let branch = repository_handle
2595 .update(&mut cx, |repository_handle, _| {
2596 repository_handle.default_branch(false)
2597 })
2598 .await??
2599 .map(Into::into);
2600
2601 Ok(proto::GetDefaultBranchResponse { branch })
2602 }
2603 async fn handle_create_branch(
2604 this: Entity<Self>,
2605 envelope: TypedEnvelope<proto::GitCreateBranch>,
2606 mut cx: AsyncApp,
2607 ) -> Result<proto::Ack> {
2608 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2609 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2610 let branch_name = envelope.payload.branch_name;
2611
2612 repository_handle
2613 .update(&mut cx, |repository_handle, _| {
2614 repository_handle.create_branch(branch_name, None)
2615 })
2616 .await??;
2617
2618 Ok(proto::Ack {})
2619 }
2620
2621 async fn handle_change_branch(
2622 this: Entity<Self>,
2623 envelope: TypedEnvelope<proto::GitChangeBranch>,
2624 mut cx: AsyncApp,
2625 ) -> Result<proto::Ack> {
2626 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2627 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2628 let branch_name = envelope.payload.branch_name;
2629
2630 repository_handle
2631 .update(&mut cx, |repository_handle, _| {
2632 repository_handle.change_branch(branch_name)
2633 })
2634 .await??;
2635
2636 Ok(proto::Ack {})
2637 }
2638
2639 async fn handle_rename_branch(
2640 this: Entity<Self>,
2641 envelope: TypedEnvelope<proto::GitRenameBranch>,
2642 mut cx: AsyncApp,
2643 ) -> Result<proto::Ack> {
2644 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2645 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2646 let branch = envelope.payload.branch;
2647 let new_name = envelope.payload.new_name;
2648
2649 repository_handle
2650 .update(&mut cx, |repository_handle, _| {
2651 repository_handle.rename_branch(branch, new_name)
2652 })
2653 .await??;
2654
2655 Ok(proto::Ack {})
2656 }
2657
2658 async fn handle_create_remote(
2659 this: Entity<Self>,
2660 envelope: TypedEnvelope<proto::GitCreateRemote>,
2661 mut cx: AsyncApp,
2662 ) -> Result<proto::Ack> {
2663 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2664 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2665 let remote_name = envelope.payload.remote_name;
2666 let remote_url = envelope.payload.remote_url;
2667
2668 repository_handle
2669 .update(&mut cx, |repository_handle, _| {
2670 repository_handle.create_remote(remote_name, remote_url)
2671 })
2672 .await??;
2673
2674 Ok(proto::Ack {})
2675 }
2676
2677 async fn handle_delete_branch(
2678 this: Entity<Self>,
2679 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2680 mut cx: AsyncApp,
2681 ) -> Result<proto::Ack> {
2682 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2683 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2684 let is_remote = envelope.payload.is_remote;
2685 let branch_name = envelope.payload.branch_name;
2686
2687 repository_handle
2688 .update(&mut cx, |repository_handle, _| {
2689 repository_handle.delete_branch(is_remote, branch_name)
2690 })
2691 .await??;
2692
2693 Ok(proto::Ack {})
2694 }
2695
2696 async fn handle_remove_remote(
2697 this: Entity<Self>,
2698 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2699 mut cx: AsyncApp,
2700 ) -> Result<proto::Ack> {
2701 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2702 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2703 let remote_name = envelope.payload.remote_name;
2704
2705 repository_handle
2706 .update(&mut cx, |repository_handle, _| {
2707 repository_handle.remove_remote(remote_name)
2708 })
2709 .await??;
2710
2711 Ok(proto::Ack {})
2712 }
2713
2714 async fn handle_show(
2715 this: Entity<Self>,
2716 envelope: TypedEnvelope<proto::GitShow>,
2717 mut cx: AsyncApp,
2718 ) -> Result<proto::GitCommitDetails> {
2719 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2720 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2721
2722 let commit = repository_handle
2723 .update(&mut cx, |repository_handle, _| {
2724 repository_handle.show(envelope.payload.commit)
2725 })
2726 .await??;
2727 Ok(proto::GitCommitDetails {
2728 sha: commit.sha.into(),
2729 message: commit.message.into(),
2730 commit_timestamp: commit.commit_timestamp,
2731 author_email: commit.author_email.into(),
2732 author_name: commit.author_name.into(),
2733 })
2734 }
2735
2736 async fn handle_create_checkpoint(
2737 this: Entity<Self>,
2738 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2739 mut cx: AsyncApp,
2740 ) -> Result<proto::GitCreateCheckpointResponse> {
2741 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2742 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2743
2744 let checkpoint = repository_handle
2745 .update(&mut cx, |repository, _| repository.checkpoint())
2746 .await??;
2747
2748 Ok(proto::GitCreateCheckpointResponse {
2749 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2750 })
2751 }
2752
2753 async fn handle_create_archive_checkpoint(
2754 this: Entity<Self>,
2755 envelope: TypedEnvelope<proto::GitCreateArchiveCheckpoint>,
2756 mut cx: AsyncApp,
2757 ) -> Result<proto::GitCreateArchiveCheckpointResponse> {
2758 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2759 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2760
2761 let (staged_commit_sha, unstaged_commit_sha) = repository_handle
2762 .update(&mut cx, |repository, _| {
2763 repository.create_archive_checkpoint()
2764 })
2765 .await??;
2766
2767 Ok(proto::GitCreateArchiveCheckpointResponse {
2768 staged_commit_sha,
2769 unstaged_commit_sha,
2770 })
2771 }
2772
2773 async fn handle_restore_checkpoint(
2774 this: Entity<Self>,
2775 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2776 mut cx: AsyncApp,
2777 ) -> Result<proto::Ack> {
2778 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2779 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2780
2781 let checkpoint = GitRepositoryCheckpoint {
2782 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2783 };
2784
2785 repository_handle
2786 .update(&mut cx, |repository, _| {
2787 repository.restore_checkpoint(checkpoint)
2788 })
2789 .await??;
2790
2791 Ok(proto::Ack {})
2792 }
2793
2794 async fn handle_restore_archive_checkpoint(
2795 this: Entity<Self>,
2796 envelope: TypedEnvelope<proto::GitRestoreArchiveCheckpoint>,
2797 mut cx: AsyncApp,
2798 ) -> Result<proto::Ack> {
2799 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2800 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2801 let staged_commit_sha = envelope.payload.staged_commit_sha;
2802 let unstaged_commit_sha = envelope.payload.unstaged_commit_sha;
2803
2804 repository_handle
2805 .update(&mut cx, |repository, _| {
2806 repository.restore_archive_checkpoint(staged_commit_sha, unstaged_commit_sha)
2807 })
2808 .await??;
2809
2810 Ok(proto::Ack {})
2811 }
2812
2813 async fn handle_compare_checkpoints(
2814 this: Entity<Self>,
2815 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2816 mut cx: AsyncApp,
2817 ) -> Result<proto::GitCompareCheckpointsResponse> {
2818 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2819 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2820
2821 let left = GitRepositoryCheckpoint {
2822 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2823 };
2824 let right = GitRepositoryCheckpoint {
2825 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2826 };
2827
2828 let equal = repository_handle
2829 .update(&mut cx, |repository, _| {
2830 repository.compare_checkpoints(left, right)
2831 })
2832 .await??;
2833
2834 Ok(proto::GitCompareCheckpointsResponse { equal })
2835 }
2836
2837 async fn handle_diff_checkpoints(
2838 this: Entity<Self>,
2839 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2840 mut cx: AsyncApp,
2841 ) -> Result<proto::GitDiffCheckpointsResponse> {
2842 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2843 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2844
2845 let base = GitRepositoryCheckpoint {
2846 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2847 };
2848 let target = GitRepositoryCheckpoint {
2849 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2850 };
2851
2852 let diff = repository_handle
2853 .update(&mut cx, |repository, _| {
2854 repository.diff_checkpoints(base, target)
2855 })
2856 .await??;
2857
2858 Ok(proto::GitDiffCheckpointsResponse { diff })
2859 }
2860
2861 async fn handle_load_commit_diff(
2862 this: Entity<Self>,
2863 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2864 mut cx: AsyncApp,
2865 ) -> Result<proto::LoadCommitDiffResponse> {
2866 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2867 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2868
2869 let commit_diff = repository_handle
2870 .update(&mut cx, |repository_handle, _| {
2871 repository_handle.load_commit_diff(envelope.payload.commit)
2872 })
2873 .await??;
2874 Ok(proto::LoadCommitDiffResponse {
2875 files: commit_diff
2876 .files
2877 .into_iter()
2878 .map(|file| proto::CommitFile {
2879 path: file.path.to_proto(),
2880 old_text: file.old_text,
2881 new_text: file.new_text,
2882 is_binary: file.is_binary,
2883 })
2884 .collect(),
2885 })
2886 }
2887
2888 async fn handle_file_history(
2889 this: Entity<Self>,
2890 envelope: TypedEnvelope<proto::GitFileHistory>,
2891 mut cx: AsyncApp,
2892 ) -> Result<proto::GitFileHistoryResponse> {
2893 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2894 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2895 let path = RepoPath::from_proto(&envelope.payload.path)?;
2896 let skip = envelope.payload.skip as usize;
2897 let limit = envelope.payload.limit.map(|l| l as usize);
2898
2899 let file_history = repository_handle
2900 .update(&mut cx, |repository_handle, _| {
2901 repository_handle.file_history_paginated(path, skip, limit)
2902 })
2903 .await??;
2904
2905 Ok(proto::GitFileHistoryResponse {
2906 entries: file_history
2907 .entries
2908 .into_iter()
2909 .map(|entry| proto::FileHistoryEntry {
2910 sha: entry.sha.to_string(),
2911 subject: entry.subject.to_string(),
2912 message: entry.message.to_string(),
2913 commit_timestamp: entry.commit_timestamp,
2914 author_name: entry.author_name.to_string(),
2915 author_email: entry.author_email.to_string(),
2916 })
2917 .collect(),
2918 path: file_history.path.to_proto(),
2919 })
2920 }
2921
2922 async fn handle_reset(
2923 this: Entity<Self>,
2924 envelope: TypedEnvelope<proto::GitReset>,
2925 mut cx: AsyncApp,
2926 ) -> Result<proto::Ack> {
2927 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2928 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2929
2930 let mode = match envelope.payload.mode() {
2931 git_reset::ResetMode::Soft => ResetMode::Soft,
2932 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2933 };
2934
2935 repository_handle
2936 .update(&mut cx, |repository_handle, cx| {
2937 repository_handle.reset(envelope.payload.commit, mode, cx)
2938 })
2939 .await??;
2940 Ok(proto::Ack {})
2941 }
2942
2943 async fn handle_checkout_files(
2944 this: Entity<Self>,
2945 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2946 mut cx: AsyncApp,
2947 ) -> Result<proto::Ack> {
2948 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2949 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2950 let paths = envelope
2951 .payload
2952 .paths
2953 .iter()
2954 .map(|s| RepoPath::from_proto(s))
2955 .collect::<Result<Vec<_>>>()?;
2956
2957 repository_handle
2958 .update(&mut cx, |repository_handle, cx| {
2959 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2960 })
2961 .await?;
2962 Ok(proto::Ack {})
2963 }
2964
    /// Handles a remote request to open the commit-message buffer for a
    /// repository, replicating it to the requesting peer and returning its
    /// buffer id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        // Replicate the buffer to the peer that asked for it. Prefer the
        // original sender id so the buffer reaches the right peer when the
        // request was forwarded through an intermediary.
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });
        // NOTE(review): the value returned by `this.update` above is
        // discarded (no `?`) — presumably intentional since the response is
        // still sent, but worth confirming.

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2995
    /// Handles an askpass prompt forwarded from the remote side of a git
    /// operation: finds the registered delegate for `askpass_id`, asks it for
    /// the credential, and returns the (decrypted) response.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map while we await the user, so a
        // concurrent prompt for the same id can't use it simultaneously.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so follow-up prompts for this operation
        // (e.g. username then password) can find it again.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
3024
3025 async fn handle_check_for_pushed_commits(
3026 this: Entity<Self>,
3027 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
3028 mut cx: AsyncApp,
3029 ) -> Result<proto::CheckForPushedCommitsResponse> {
3030 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
3031 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
3032
3033 let branches = repository_handle
3034 .update(&mut cx, |repository_handle, _| {
3035 repository_handle.check_for_pushed_commits()
3036 })
3037 .await??;
3038 Ok(proto::CheckForPushedCommitsResponse {
3039 pushed_to: branches
3040 .into_iter()
3041 .map(|commit| commit.to_string())
3042 .collect(),
3043 })
3044 }
3045
3046 async fn handle_git_diff(
3047 this: Entity<Self>,
3048 envelope: TypedEnvelope<proto::GitDiff>,
3049 mut cx: AsyncApp,
3050 ) -> Result<proto::GitDiffResponse> {
3051 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
3052 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
3053 let diff_type = match envelope.payload.diff_type() {
3054 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
3055 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
3056 proto::git_diff::DiffType::MergeBase => {
3057 let base_ref = envelope
3058 .payload
3059 .merge_base_ref
3060 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
3061 DiffType::MergeBase {
3062 base_ref: base_ref.into(),
3063 }
3064 }
3065 };
3066
3067 let mut diff = repository_handle
3068 .update(&mut cx, |repository_handle, cx| {
3069 repository_handle.diff(diff_type, cx)
3070 })
3071 .await??;
3072 const ONE_MB: usize = 1_000_000;
3073 if diff.len() > ONE_MB {
3074 diff = diff.chars().take(ONE_MB).collect()
3075 }
3076
3077 Ok(proto::GitDiffResponse { diff })
3078 }
3079
3080 async fn handle_tree_diff(
3081 this: Entity<Self>,
3082 request: TypedEnvelope<proto::GetTreeDiff>,
3083 mut cx: AsyncApp,
3084 ) -> Result<proto::GetTreeDiffResponse> {
3085 let repository_id = RepositoryId(request.payload.repository_id);
3086 let diff_type = if request.payload.is_merge {
3087 DiffTreeType::MergeBase {
3088 base: request.payload.base.into(),
3089 head: request.payload.head.into(),
3090 }
3091 } else {
3092 DiffTreeType::Since {
3093 base: request.payload.base.into(),
3094 head: request.payload.head.into(),
3095 }
3096 };
3097
3098 let diff = this
3099 .update(&mut cx, |this, cx| {
3100 let repository = this.repositories().get(&repository_id)?;
3101 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
3102 })
3103 .context("missing repository")?
3104 .await??;
3105
3106 Ok(proto::GetTreeDiffResponse {
3107 entries: diff
3108 .entries
3109 .into_iter()
3110 .map(|(path, status)| proto::TreeDiffStatus {
3111 path: path.as_ref().to_proto(),
3112 status: match status {
3113 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
3114 TreeDiffStatus::Modified { .. } => {
3115 proto::tree_diff_status::Status::Modified.into()
3116 }
3117 TreeDiffStatus::Deleted { .. } => {
3118 proto::tree_diff_status::Status::Deleted.into()
3119 }
3120 },
3121 oid: match status {
3122 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
3123 Some(old.to_string())
3124 }
3125 TreeDiffStatus::Added => None,
3126 },
3127 })
3128 .collect(),
3129 })
3130 }
3131
3132 async fn handle_get_blob_content(
3133 this: Entity<Self>,
3134 request: TypedEnvelope<proto::GetBlobContent>,
3135 mut cx: AsyncApp,
3136 ) -> Result<proto::GetBlobContentResponse> {
3137 let oid = git::Oid::from_str(&request.payload.oid)?;
3138 let repository_id = RepositoryId(request.payload.repository_id);
3139 let content = this
3140 .update(&mut cx, |this, cx| {
3141 let repository = this.repositories().get(&repository_id)?;
3142 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
3143 })
3144 .context("missing repository")?
3145 .await?;
3146 Ok(proto::GetBlobContentResponse { content })
3147 }
3148
3149 async fn handle_open_unstaged_diff(
3150 this: Entity<Self>,
3151 request: TypedEnvelope<proto::OpenUnstagedDiff>,
3152 mut cx: AsyncApp,
3153 ) -> Result<proto::OpenUnstagedDiffResponse> {
3154 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3155 let diff = this
3156 .update(&mut cx, |this, cx| {
3157 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
3158 Some(this.open_unstaged_diff(buffer, cx))
3159 })
3160 .context("missing buffer")?
3161 .await?;
3162 this.update(&mut cx, |this, _| {
3163 let shared_diffs = this
3164 .shared_diffs
3165 .entry(request.original_sender_id.unwrap_or(request.sender_id))
3166 .or_default();
3167 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
3168 });
3169 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
3170 Ok(proto::OpenUnstagedDiffResponse { staged_text })
3171 }
3172
    /// RPC handler: opens (or retrieves) the uncommitted diff for a buffer,
    /// records it as shared with the requesting peer, and returns the
    /// committed/staged base texts.
    ///
    /// To keep responses small, the staged text is omitted when the index base
    /// is the same snapshot as the HEAD base (`Mode::IndexMatchesHead`); the
    /// receiver can reuse the committed text in that case.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Remember that this peer holds the diff so future base-text updates
        // are forwarded to it.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff of an uncommitted diff is the unstaged diff,
            // whose base text is the index content (when it exists).
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    // Matching remote ids mean index and HEAD share the exact
                    // same base-text buffer, so sending it twice is redundant.
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // HEAD base exists but there is no index base text.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No HEAD base text (e.g. the file is not in HEAD).
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3233
3234 async fn handle_update_diff_bases(
3235 this: Entity<Self>,
3236 request: TypedEnvelope<proto::UpdateDiffBases>,
3237 mut cx: AsyncApp,
3238 ) -> Result<()> {
3239 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3240 this.update(&mut cx, |this, cx| {
3241 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
3242 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
3243 {
3244 let buffer = buffer.read(cx).text_snapshot();
3245 diff_state.update(cx, |diff_state, cx| {
3246 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
3247 })
3248 }
3249 });
3250 Ok(())
3251 }
3252
3253 async fn handle_blame_buffer(
3254 this: Entity<Self>,
3255 envelope: TypedEnvelope<proto::BlameBuffer>,
3256 mut cx: AsyncApp,
3257 ) -> Result<proto::BlameBufferResponse> {
3258 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3259 let version = deserialize_version(&envelope.payload.version);
3260 let buffer = this.read_with(&cx, |this, cx| {
3261 this.buffer_store.read(cx).get_existing(buffer_id)
3262 })?;
3263 buffer
3264 .update(&mut cx, |buffer, _| {
3265 buffer.wait_for_version(version.clone())
3266 })
3267 .await?;
3268 let blame = this
3269 .update(&mut cx, |this, cx| {
3270 this.blame_buffer(&buffer, Some(version), cx)
3271 })
3272 .await?;
3273 Ok(serialize_blame_buffer_response(blame))
3274 }
3275
3276 async fn handle_get_permalink_to_line(
3277 this: Entity<Self>,
3278 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3279 mut cx: AsyncApp,
3280 ) -> Result<proto::GetPermalinkToLineResponse> {
3281 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3282 // let version = deserialize_version(&envelope.payload.version);
3283 let selection = {
3284 let proto_selection = envelope
3285 .payload
3286 .selection
3287 .context("no selection to get permalink for defined")?;
3288 proto_selection.start as u32..proto_selection.end as u32
3289 };
3290 let buffer = this.read_with(&cx, |this, cx| {
3291 this.buffer_store.read(cx).get_existing(buffer_id)
3292 })?;
3293 let permalink = this
3294 .update(&mut cx, |this, cx| {
3295 this.get_permalink_to_line(&buffer, selection, cx)
3296 })
3297 .await?;
3298 Ok(proto::GetPermalinkToLineResponse {
3299 permalink: permalink.to_string(),
3300 })
3301 }
3302
3303 fn repository_for_request(
3304 this: &Entity<Self>,
3305 id: RepositoryId,
3306 cx: &mut AsyncApp,
3307 ) -> Result<Entity<Repository>> {
3308 this.read_with(cx, |this, _| {
3309 this.repositories
3310 .get(&id)
3311 .context("missing repository handle")
3312 .cloned()
3313 })
3314 }
3315
3316 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3317 self.repositories
3318 .iter()
3319 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3320 .collect()
3321 }
3322
    /// Groups freshly-updated worktree entries by the repository that contains
    /// them, converting each path into a repository-relative [`RepoPath`].
    ///
    /// When repositories are nested, a path is attributed only to its
    /// innermost containing repository. All the heavy work (sorting plus one
    /// range scan per repository) runs on the background executor.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorting lets each repository locate its entries via binary search
        // (`partition_point`) below.
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // Iterate repositories in reverse sorted order so deeper (nested)
            // work directories are processed first.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the entry's index so duplicates can be
                        // filtered out across repositories below.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3402}
3403
impl BufferGitState {
    /// Creates an empty per-buffer git state: no diffs, no base texts, and no
    /// recalculation in flight.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // `false` = no recalculation currently running.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }

    /// Records the buffer's new language and schedules a diff recalculation so
    /// base texts can pick up the new language.
    #[ztracing::instrument(skip_all)]
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Re-parses the buffer for conflict markers and updates the associated
    /// [`ConflictSet`], if one is still alive.
    ///
    /// The returned receiver resolves once the conflict set has been updated.
    /// If there is no live conflict set, or it currently reports no conflict,
    /// the receiver is returned without ever being signaled (it is dropped).
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff the conflict regions off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Signal every caller that requested a reparse since the
                    // last update.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }

    /// Upgrades the weak handle to the unstaged diff (base = index text), if
    /// it is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Upgrades the weak handle to the uncommitted diff (base = HEAD text),
    /// if it is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Upgrades the weak handle to the diff keyed by the given (optional)
    /// commit oid, if it is still alive.
    fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
        self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
    }

    /// Applies a host-sent [`proto::UpdateDiffBases`] message, translating the
    /// wire `mode` into a [`DiffBasesChange`] and triggering a recalculation.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        // Unknown modes (e.g. from a newer peer) are silently ignored.
        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }

    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves once it finishes (or is cancelled); otherwise returns `None`.
    pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
        if *self.recalculating_tx.borrow() {
            let mut rx = self.recalculating_tx.subscribe();
            Some(async move {
                loop {
                    let is_recalculating = rx.recv().await;
                    // Stop once the watch reports `false` or the channel closes.
                    if is_recalculating != Some(true) {
                        break;
                    }
                }
            })
        } else {
            None
        }
    }

    /// Stores new head and/or index base texts (normalizing line endings) and
    /// kicks off a diff recalculation.
    ///
    /// `SetBoth` deliberately shares a single allocation between head and
    /// index so that `recalculate_diffs` can detect "index matches head" via
    /// `Arc::ptr_eq`.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: Option<DiffBasesChange>,
        cx: &mut Context<Self>,
    ) {
        match diff_bases_change {
            Some(DiffBasesChange::SetIndex(index)) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetHead(head)) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            Some(DiffBasesChange::SetBoth(text)) => {
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::from(text.as_str())
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetEach { index, head }) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            None => {}
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged, uncommitted, and per-oid diffs for this buffer
    /// against the currently stored base texts, on a spawned task.
    ///
    /// The task bails out early if new hunk staging operations started after
    /// this recalculation was scheduled, leaving pending state intact until
    /// the index settles; a later recalculation will pick it up.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality works because `diff_bases_changed` stores one
        // shared Arc for both texts when they are known to match.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Prune dropped oid diffs along with their cached base texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When index == HEAD, the unstaged snapshot doubles as the
                // uncommitted snapshot without recomputing it.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot(new_oid_diff, &buffer, cx)
                    })
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and announce that recalculation finished.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
}
3788
/// Builds an [`AskPassDelegate`] that forwards credential prompts from a
/// repository operation to the downstream client over RPC.
///
/// If no downstream client is connected, the prompt is dropped (the sender
/// `tx` is discarded), which cancels the askpass request.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Best-effort scrubbing of the plaintext password from memory
                // after it has been re-encrypted for local use.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3818
3819impl RepositoryId {
3820 pub fn to_proto(self) -> u64 {
3821 self.0
3822 }
3823
3824 pub fn from_proto(id: u64) -> Self {
3825 RepositoryId(id)
3826 }
3827}
3828
impl RepositorySnapshot {
    /// Creates a placeholder snapshot for a repository that has not been
    /// scanned yet: no statuses, no branch, `scan_id` 0.
    fn empty(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
    ) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            // A repository without a separate original checkout is its own
            // main worktree (see `is_main_worktree` below).
            original_repo_abs_path: original_repo_abs_path
                .unwrap_or_else(|| work_directory_abs_path.clone()),
            work_directory_abs_path,
            branch: None,
            branch_list: Arc::from([]),
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
            linked_worktrees: Arc::from([]),
            path_style,
        }
    }

    /// Serializes the entire snapshot into an `UpdateRepository` message, for
    /// a peer that has no prior state: every status is sent as updated and
    /// nothing is removed.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            // NOTE(review): entry_ids carries the repository id here (unlike
            // `build_update`, which sends an empty vec) — presumably for
            // compatibility with worktree-entry-based peers; confirm.
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// Serializes the delta between `old` and `self` into an
    /// `UpdateRepository` message: statuses that appeared or changed go into
    /// `updated_statuses`, statuses that disappeared go into
    /// `removed_statuses`. All other fields are sent in full.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        // Two-pointer merge-join over both status lists, which are kept
        // sorted by repo path.
        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        // Path only exists in the new snapshot: added.
                        Ordering::Less => {
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        // Path exists in both: send only if something changed.
                        Ordering::Equal => {
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        // Path only exists in the old snapshot: removed.
                        Ordering::Greater => {
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }

    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    pub fn is_main_worktree(&self) -> bool {
        self.work_directory_abs_path == self.original_repo_abs_path
    }

    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }

    /// The git worktrees linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }

    /// Iterates over all status entries, in repo-path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// Aggregated status summary over every entry in the repository.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// Returns the status entry for a single path, if one exists.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// Returns the diff statistics recorded for a single path, if any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }

    /// Converts an absolute path into a path relative to this repository's
    /// work directory; `None` if the path lies outside the work directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Converts a repo-relative path back into an absolute path under the
    /// work directory.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }

    /// Static helper behind `abs_path_to_repo_path`, usable without a full
    /// snapshot (e.g. from background tasks).
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
        path_style: PathStyle,
    ) -> Option<RepoPath> {
        let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
        Some(RepoPath::from_rel_path(&rel_path))
    }

    /// Whether the given path was conflicted when the merge heads last
    /// changed, regardless of its current status.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }

    /// Whether the given path is conflicted: either it currently has a
    /// conflicted status, or it was recorded as conflicted for the current
    /// merge heads.
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change = self
            .merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
4069
4070pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
4071 proto::StashEntry {
4072 oid: entry.oid.as_bytes().to_vec(),
4073 message: entry.message.clone(),
4074 branch: entry.branch.clone(),
4075 index: entry.index as u64,
4076 timestamp: entry.timestamp,
4077 }
4078}
4079
4080pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
4081 Ok(StashEntry {
4082 oid: Oid::from_bytes(&entry.oid)?,
4083 message: entry.message.clone(),
4084 index: entry.index as usize,
4085 branch: entry.branch.clone(),
4086 timestamp: entry.timestamp,
4087 })
4088}
4089
4090impl MergeDetails {
4091 async fn update(
4092 &mut self,
4093 backend: &Arc<dyn GitRepository>,
4094 current_conflicted_paths: Vec<RepoPath>,
4095 ) -> Result<bool> {
4096 log::debug!("load merge details");
4097 self.message = backend.merge_message().await.map(SharedString::from);
4098 let heads = backend
4099 .revparse_batch(vec![
4100 "MERGE_HEAD".into(),
4101 "CHERRY_PICK_HEAD".into(),
4102 "REBASE_HEAD".into(),
4103 "REVERT_HEAD".into(),
4104 "APPLY_HEAD".into(),
4105 ])
4106 .await
4107 .log_err()
4108 .unwrap_or_default()
4109 .into_iter()
4110 .map(|opt| opt.map(SharedString::from))
4111 .collect::<Vec<_>>();
4112
4113 let mut conflicts_changed = false;
4114
4115 // Record the merge state for newly conflicted paths
4116 for path in ¤t_conflicted_paths {
4117 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
4118 conflicts_changed = true;
4119 self.merge_heads_by_conflicted_path
4120 .insert(path.clone(), heads.clone());
4121 }
4122 }
4123
4124 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
4125 self.merge_heads_by_conflicted_path
4126 .retain(|path, old_merge_heads| {
4127 let keep = current_conflicted_paths.contains(path)
4128 || (old_merge_heads == &heads
4129 && old_merge_heads.iter().any(|head| head.is_some()));
4130 if !keep {
4131 conflicts_changed = true;
4132 }
4133 keep
4134 });
4135
4136 Ok(conflicts_changed)
4137 }
4138}
4139
4140impl Repository {
4141 pub fn is_trusted(&self) -> bool {
4142 match self.repository_state.peek() {
4143 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
4144 _ => false,
4145 }
4146 }
4147
    /// Returns a clone of the current repository snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }
4151
    /// Iterates over the pending git operations recorded for this repository.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }
4155
    /// Returns the aggregate summary over all pending git operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }
4159
    /// Looks up the pending git operations recorded for a single repo path.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
4165
    /// Builds a `Repository` backed by a local git repository on disk.
    ///
    /// The repository state is loaded asynchronously: `LocalRepositoryState::new`
    /// runs in a spawned task whose shared future is handed both to the local
    /// git worker and, wrapped in `RepositoryState::Local`, stored on the
    /// returned value. Errors are stringified via `map_err` so the shared
    /// future's output can be cloned by every consumer.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data when relevant parts of the
        // repository change. The `scan_id > 1` checks presumably skip the
        // initial scan — TODO confirm.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                if this.scan_id > 1 {
                    // Only graph data covering all sources (which includes
                    // stash entries) is invalidated by stash changes.
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4239
    /// Builds a `Repository` that proxies git operations for project
    /// `project_id` to a remote peer over `client`.
    ///
    /// Unlike [`Self::local`], the repository state is available immediately
    /// (no asynchronous backend initialization is required).
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4277
    /// Upgrades the weak handle to the owning [`GitStore`], if it still exists.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4281
    /// Recomputes the index and HEAD diff bases for every open buffer that
    /// belongs to this repository, applying — and mirroring to any downstream
    /// collaborators — only the bases that actually changed.
    ///
    /// Keyed on `GitJobKey::ReloadBufferDiffBases`, so queued duplicates can
    /// be coalesced by the git worker. Local repositories only; for any other
    /// state this logs an error and bails.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Collect, per open buffer in this repo, which diff bases
                // (index text / head text) are currently in use and therefore
                // worth reloading.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Off the main thread, load the fresh base texts and compare
                // them to the cached copies to compute the minimal change set.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // When both bases changed to identical
                                            // text, a single shared base suffices.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Apply the changes locally, and forward them to downstream
                // clients when this project is being shared.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4441
    /// Enqueues `job` on this repository's git worker with no deduplication
    /// key; see [`Self::send_keyed_job`] for the `status` semantics and the
    /// behavior of the returned receiver.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4454
    /// Enqueues a job on this repository's git worker.
    ///
    /// * `key` — optional deduplication key, forwarded to the worker so it can
    ///   coalesce queued jobs sharing the same key.
    /// * `status` — optional human-readable message; while the job runs it is
    ///   recorded in `active_jobs` together with the job's start time (so the
    ///   UI can display progress) and removed once the job finishes.
    ///
    /// The returned receiver yields the job's output; it is canceled if the
    /// job is dropped without ever running.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Each job gets a unique id so the bookkeeping below can remove
        // exactly its own entry from `active_jobs`.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4504
4505 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4506 let Some(git_store) = self.git_store.upgrade() else {
4507 return;
4508 };
4509 let entity = cx.entity();
4510 git_store.update(cx, |git_store, cx| {
4511 let Some((&id, _)) = git_store
4512 .repositories
4513 .iter()
4514 .find(|(_, handle)| *handle == &entity)
4515 else {
4516 return;
4517 };
4518 git_store.active_repo_id = Some(id);
4519 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4520 });
4521 }
4522
    /// Iterates over the status entries cached in the latest snapshot.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4526
    /// Returns the cached diff statistics for `path`, if any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4530
    /// Returns a clone of the stash entries cached in the latest snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4534
4535 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4536 let git_store = self.git_store.upgrade()?;
4537 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4538 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4539 let abs_path = SanitizedPath::new(&abs_path);
4540 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4541 Some(ProjectPath {
4542 worktree_id: worktree.read(cx).id(),
4543 path: relative_path,
4544 })
4545 }
4546
4547 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4548 let git_store = self.git_store.upgrade()?;
4549 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4550 let abs_path = worktree_store.absolutize(path, cx)?;
4551 self.snapshot.abs_path_to_repo_path(&abs_path)
4552 }
4553
4554 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4555 other
4556 .read(cx)
4557 .snapshot
4558 .work_directory_abs_path
4559 .starts_with(&self.snapshot.work_directory_abs_path)
4560 }
4561
    /// Returns the shared commit-message buffer, creating it on first use.
    ///
    /// Local repositories create the buffer in-process; remote ones request
    /// it from the host over RPC, wait for the replicated buffer to arrive,
    /// and tag it with the "Git Commit" language when a registry is given.
    /// The result is cached in `commit_message_buffer`.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: the buffer was already opened.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4614
4615 fn open_local_commit_buffer(
4616 language_registry: Option<Arc<LanguageRegistry>>,
4617 buffer_store: Entity<BufferStore>,
4618 cx: &mut Context<Self>,
4619 ) -> Task<Result<Entity<Buffer>>> {
4620 cx.spawn(async move |repository, cx| {
4621 let git_commit_language = match language_registry {
4622 Some(language_registry) => {
4623 Some(language_registry.language_for_name("Git Commit").await?)
4624 }
4625 None => None,
4626 };
4627 let buffer = buffer_store
4628 .update(cx, |buffer_store, cx| {
4629 buffer_store.create_buffer(git_commit_language, false, cx)
4630 })
4631 .await?;
4632
4633 repository.update(cx, |repository, _| {
4634 repository.commit_message_buffer = Some(buffer.clone());
4635 })?;
4636 Ok(buffer)
4637 })
4638 }
4639
    /// Restores `paths` to their contents at `commit`
    /// (`git checkout <commit> -- <paths>`), tracking the operation as a
    /// pending revert on those paths while it runs.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    // Forward the checkout to the host of the
                                    // shared project.
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4694
    /// Resets HEAD to `commit` with the given [`ResetMode`].
    ///
    /// For remote repositories the request is forwarded over RPC; the proto
    /// mapping covers exactly the `Soft` and `Mixed` modes.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4728
4729 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4730 let id = self.id;
4731 self.send_job(None, move |git_repo, _cx| async move {
4732 match git_repo {
4733 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4734 backend.show(commit).await
4735 }
4736 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4737 let resp = client
4738 .request(proto::GitShow {
4739 project_id: project_id.0,
4740 repository_id: id.to_proto(),
4741 commit,
4742 })
4743 .await?;
4744
4745 Ok(CommitDetails {
4746 sha: resp.sha.into(),
4747 message: resp.message.into(),
4748 commit_timestamp: resp.commit_timestamp,
4749 author_email: resp.author_email.into(),
4750 author_name: resp.author_name.into(),
4751 })
4752 }
4753 }
4754 })
4755 }
4756
    /// Loads the full diff for `commit` (per-file old/new text).
    ///
    /// Local repositories read it from the backend; remote ones fetch it over
    /// RPC and reassemble the [`CommitDiff`] from the proto response.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4792
    /// Loads the entire commit history for `path` (no pagination).
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4799
    /// Loads a page of commit history for `path`, skipping the first `skip`
    /// entries and returning at most `limit` entries (all when `None`).
    ///
    /// Remote repositories fetch the page over RPC and rebuild the
    /// [`git::repository::FileHistory`] from the proto response.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4841
    /// Returns the cached initial graph data for `(log_source, log_order)`,
    /// if a fetch was already started for that combination via `graph_data`.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4849
4850 pub fn search_commits(
4851 &mut self,
4852 log_source: LogSource,
4853 search_args: SearchCommitArgs,
4854 request_tx: smol::channel::Sender<Oid>,
4855 cx: &mut Context<Self>,
4856 ) {
4857 let repository_state = self.repository_state.clone();
4858
4859 cx.background_spawn(async move {
4860 let repo_state = repository_state.await;
4861
4862 match repo_state {
4863 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4864 backend
4865 .search_commits(log_source, search_args, request_tx)
4866 .await
4867 .log_err();
4868 }
4869 Ok(RepositoryState::Remote(_)) => {}
4870 Err(_) => {}
4871 };
4872 })
4873 .detach();
4874 }
4875
    /// Returns commit-graph rows for `range`, starting a background fetch the
    /// first time a given `(log_source, log_order)` pair is requested.
    ///
    /// While the fetch task runs, `is_loading` is true and `commits` exposes
    /// whatever rows have streamed in so far; `range` is clamped to the
    /// currently available data.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                // First request for this (source, order): spawn the streaming
                // fetch that populates the cache entry.
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record the failure on the cache entry so it can be
                    // surfaced through GraphDataResponse::error.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the data loaded so far.
        // NOTE(review): for an entirely out-of-range request (e.g. start past
        // the end of non-empty data) this still yields the last row rather
        // than an empty slice — confirm that is intended.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4945
    /// Streams initial commit-graph data from the local backend into this
    /// repository's `initial_graph_data` cache for `(log_source, log_order)`,
    /// emitting `GitGraphEvent::CountUpdated` as each batch arrives.
    ///
    /// Returns the backend task's final result (errors stringified into a
    /// `SharedString`) once the stream ends.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // The backend produces batches on a background thread and sends them
        // through `request_tx`; dropping the sender ends the loop below.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                // Append the batch to the existing entry, maintaining the
                // oid -> row-index lookup alongside the row data.
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4999
    /// Returns the load state of the detailed data for commit `sha`,
    /// requesting it from the graph commit-data handler when not yet cached.
    ///
    /// If the handler is `Closed`, this call only starts it; the request for
    /// `sha` is issued by a subsequent call once the handler is `Open`. Until
    /// data arrives the returned state is `Loading`.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Only mark the commit as loading when the request was
                    // actually queued on the handler's channel.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
5020
    /// Spins up the pair of tasks that service on-demand commit-data lookups
    /// for the git graph:
    ///
    /// * a background task owning the backend's commit-data reader, which
    ///   serves requests from the handler's channel and shuts down after 10
    ///   seconds without one;
    /// * a foreground task that folds completed results into `commit_data`
    ///   and flips the handler back to `Closed` once the background side is
    ///   gone (its result channel closes).
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // The background task has dropped its result sender (or the
            // entity is gone): mark the handler closed so a later
            // fetch_commit_data call can restart it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: tear the reader down after 10s without a
                // request; the handler is reopened on demand.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
5111
    /// Returns the project's [`BufferStore`], if the owning git store is alive.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
5115
    /// Kicks off saves for every open buffer among `entries` that still exists
    /// on disk and has unsaved edits, returning one task per save started.
    ///
    /// Buffers whose file was deleted on disk are skipped so a save does not
    /// resurrect them.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
5142
    /// Stages `entries` in the index (`git add`); dirty buffers for those
    /// paths are saved first.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
5150
    /// Unstages `entries` from the index (`git reset`); dirty buffers for
    /// those paths are saved first.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
5158
    /// Stages (`git add`) or unstages (`git reset`) the given `entries` in the
    /// repository's index.
    ///
    /// Dirty buffers for the affected paths are saved first so the index write
    /// sees their on-disk contents. The actual index write is funneled through
    /// a job keyed on the entry set, serializing concurrent writes to the same
    /// paths. Open uncommitted diffs for the affected buffers are optimistically
    /// updated (all hunks marked staged/unstaged) before the git operation runs;
    /// if the write fails, the pending hunk state is cleared again.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        // Human-readable status line displayed while the job runs.
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        // Keying on the affected paths serializes index writes to the same entries.
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Ensure all affected buffers are flushed to disk before touching
                // the index.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically mark every hunk of each affected open
                            // buffer's uncommitted diff as staged/unstaged, and
                            // record each diff's bumped operation count so we can
                            // reconcile (or roll back) after the write completes.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        // Skip paths with no open buffer...
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        // ...or no tracked diff state...
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        // ...or whose uncommitted diff has been dropped.
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                // If the repository entity was dropped, proceed with
                                // no diffs to reconcile.
                                .unwrap_or_default();

                            // Perform the actual index write, locally or over RPC.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // On success, record which operation the write covered;
                            // on failure, clear the optimistic pending hunks.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5343
5344 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5345 let snapshot = self.snapshot.clone();
5346 let pending_ops = self.pending_ops.clone();
5347 let to_stage = cx.background_spawn(async move {
5348 snapshot
5349 .status()
5350 .filter_map(|entry| {
5351 if let Some(ops) =
5352 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5353 {
5354 if ops.staging() || ops.staged() {
5355 None
5356 } else {
5357 Some(entry.repo_path)
5358 }
5359 } else if entry.status.staging().is_fully_staged() {
5360 None
5361 } else {
5362 Some(entry.repo_path)
5363 }
5364 })
5365 .collect()
5366 });
5367
5368 cx.spawn(async move |this, cx| {
5369 let to_stage = to_stage.await;
5370 this.update(cx, |this, cx| {
5371 this.stage_or_unstage_entries(true, to_stage, cx)
5372 })?
5373 .await
5374 })
5375 }
5376
5377 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5378 let snapshot = self.snapshot.clone();
5379 let pending_ops = self.pending_ops.clone();
5380 let to_unstage = cx.background_spawn(async move {
5381 snapshot
5382 .status()
5383 .filter_map(|entry| {
5384 if let Some(ops) =
5385 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5386 {
5387 if !ops.staging() && !ops.staged() {
5388 None
5389 } else {
5390 Some(entry.repo_path)
5391 }
5392 } else if entry.status.staging().is_fully_unstaged() {
5393 None
5394 } else {
5395 Some(entry.repo_path)
5396 }
5397 })
5398 .collect()
5399 });
5400
5401 cx.spawn(async move |this, cx| {
5402 let to_unstage = to_unstage.await;
5403 this.update(cx, |this, cx| {
5404 this.stage_or_unstage_entries(false, to_unstage, cx)
5405 })?
5406 .await
5407 })
5408 }
5409
5410 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5411 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5412
5413 self.stash_entries(to_stash, cx)
5414 }
5415
    /// Stashes the given paths (`git stash push -- <paths>`), either via the
    /// local backend or by forwarding a `Stash` request to the remote host.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` is the job-channel receive; second is the job's own result.
            .await??;
            Ok(())
        })
    }
5452
    /// Pops a stash entry (`git stash pop`); `index` selects a specific entry,
    /// `None` pops the most recent one.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` is the job-channel receive; second is the job's own result.
            .await??;
            Ok(())
        })
    }
5486
    /// Applies a stash entry without dropping it (`git stash apply`); `index`
    /// selects a specific entry, `None` applies the most recent one.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` is the job-channel receive; second is the job's own result.
            .await??;
            Ok(())
        })
    }
5520
5521 pub fn stash_drop(
5522 &mut self,
5523 index: Option<usize>,
5524 cx: &mut Context<Self>,
5525 ) -> oneshot::Receiver<anyhow::Result<()>> {
5526 let id = self.id;
5527 let updates_tx = self
5528 .git_store()
5529 .and_then(|git_store| match &git_store.read(cx).state {
5530 GitStoreState::Local { downstream, .. } => downstream
5531 .as_ref()
5532 .map(|downstream| downstream.updates_tx.clone()),
5533 _ => None,
5534 });
5535 let this = cx.weak_entity();
5536 self.send_job(None, move |git_repo, mut cx| async move {
5537 match git_repo {
5538 RepositoryState::Local(LocalRepositoryState {
5539 backend,
5540 environment,
5541 ..
5542 }) => {
5543 // TODO would be nice to not have to do this manually
5544 let result = backend.stash_drop(index, environment).await;
5545 if result.is_ok()
5546 && let Ok(stash_entries) = backend.stash_entries().await
5547 {
5548 let snapshot = this.update(&mut cx, |this, cx| {
5549 this.snapshot.stash_entries = stash_entries;
5550 cx.emit(RepositoryEvent::StashEntriesChanged);
5551 this.snapshot.clone()
5552 })?;
5553 if let Some(updates_tx) = updates_tx {
5554 updates_tx
5555 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5556 .ok();
5557 }
5558 }
5559
5560 result
5561 }
5562 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5563 client
5564 .request(proto::StashDrop {
5565 project_id: project_id.0,
5566 repository_id: id.to_proto(),
5567 stash_index: index.map(|i| i as u64),
5568 })
5569 .await
5570 .context("sending stash pop request")?;
5571 Ok(())
5572 }
5573 }
5574 })
5575 }
5576
    /// Runs the given git hook (e.g. pre-commit), locally via the backend or
    /// remotely via a `RunGitHook` request.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            // Status line shown while the hook runs.
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5603
    /// Creates a commit with the given message and options.
    ///
    /// The pre-commit hook is queued first and awaited inside the commit job,
    /// so the commit only proceeds if the hook succeeds. For remote
    /// repositories, the askpass delegate is registered under a fresh id for
    /// the duration of the request so the host can forward credential prompts.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook now; its result is awaited below, inside
        // the commit job, keeping hook and commit ordered on the job queue.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // First `?`: hook job channel; second `?`: hook result.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate when the request finishes,
                    // whether it succeeded or failed.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5659
    /// Fetches from a remote (`git fetch`), returning the command's output.
    ///
    /// For remote repositories, the askpass delegate is registered under a
    /// fresh id for the duration of the request so credential prompts can be
    /// forwarded back to this client.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate when the request finishes.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5701
5702 pub fn push(
5703 &mut self,
5704 branch: SharedString,
5705 remote_branch: SharedString,
5706 remote: SharedString,
5707 options: Option<PushOptions>,
5708 askpass: AskPassDelegate,
5709 cx: &mut Context<Self>,
5710 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5711 let askpass_delegates = self.askpass_delegates.clone();
5712 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5713 let id = self.id;
5714
5715 let args = options
5716 .map(|option| match option {
5717 PushOptions::SetUpstream => " --set-upstream",
5718 PushOptions::Force => " --force-with-lease",
5719 })
5720 .unwrap_or("");
5721
5722 let updates_tx = self
5723 .git_store()
5724 .and_then(|git_store| match &git_store.read(cx).state {
5725 GitStoreState::Local { downstream, .. } => downstream
5726 .as_ref()
5727 .map(|downstream| downstream.updates_tx.clone()),
5728 _ => None,
5729 });
5730
5731 let this = cx.weak_entity();
5732 self.send_job(
5733 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5734 move |git_repo, mut cx| async move {
5735 match git_repo {
5736 RepositoryState::Local(LocalRepositoryState {
5737 backend,
5738 environment,
5739 ..
5740 }) => {
5741 let result = backend
5742 .push(
5743 branch.to_string(),
5744 remote_branch.to_string(),
5745 remote.to_string(),
5746 options,
5747 askpass,
5748 environment.clone(),
5749 cx.clone(),
5750 )
5751 .await;
5752 // TODO would be nice to not have to do this manually
5753 if result.is_ok() {
5754 let branches = backend.branches().await?;
5755 let branch = branches.into_iter().find(|branch| branch.is_head);
5756 log::info!("head branch after scan is {branch:?}");
5757 let snapshot = this.update(&mut cx, |this, cx| {
5758 this.snapshot.branch = branch;
5759 cx.emit(RepositoryEvent::HeadChanged);
5760 this.snapshot.clone()
5761 })?;
5762 if let Some(updates_tx) = updates_tx {
5763 updates_tx
5764 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5765 .ok();
5766 }
5767 }
5768 result
5769 }
5770 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5771 askpass_delegates.lock().insert(askpass_id, askpass);
5772 let _defer = util::defer(|| {
5773 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5774 debug_assert!(askpass_delegate.is_some());
5775 });
5776 let response = client
5777 .request(proto::Push {
5778 project_id: project_id.0,
5779 repository_id: id.to_proto(),
5780 askpass_id,
5781 branch_name: branch.to_string(),
5782 remote_branch_name: remote_branch.to_string(),
5783 remote_name: remote.to_string(),
5784 options: options.map(|options| match options {
5785 PushOptions::Force => proto::push::PushOptions::Force,
5786 PushOptions::SetUpstream => {
5787 proto::push::PushOptions::SetUpstream
5788 }
5789 }
5790 as i32),
5791 })
5792 .await?;
5793
5794 Ok(RemoteCommandOutput {
5795 stdout: response.stdout,
5796 stderr: response.stderr,
5797 })
5798 }
5799 }
5800 },
5801 )
5802 }
5803
    /// Pulls from `remote` (`git pull`), optionally restricted to `branch` and
    /// optionally rebasing, returning the command's output.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build the status line shown while the job runs, mirroring the
        // actual command: "git pull [--rebase] <remote> [<branch>]".
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Unregister the delegate when the request finishes.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5868
    /// Writes `content` into the index entry for `path` (`None` removes the
    /// entry), via a job keyed on the path so concurrent index writes to the
    /// same file are serialized.
    ///
    /// When `hunk_staging_operation_count` is provided, the corresponding
    /// buffer's diff state is updated after the write to record which hunk
    /// staging operation this index write covered.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit; if the file is missing or
                        // unreadable, default to non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                // Record which hunk-staging operation this index write covered,
                // if the buffer and its diff state are still around.
                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5947
5948 pub fn create_remote(
5949 &mut self,
5950 remote_name: String,
5951 remote_url: String,
5952 ) -> oneshot::Receiver<Result<()>> {
5953 let id = self.id;
5954 self.send_job(
5955 Some(format!("git remote add {remote_name} {remote_url}").into()),
5956 move |repo, _cx| async move {
5957 match repo {
5958 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5959 backend.create_remote(remote_name, remote_url).await
5960 }
5961 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5962 client
5963 .request(proto::GitCreateRemote {
5964 project_id: project_id.0,
5965 repository_id: id.to_proto(),
5966 remote_name,
5967 remote_url,
5968 })
5969 .await?;
5970
5971 Ok(())
5972 }
5973 }
5974 },
5975 )
5976 }
5977
5978 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5979 let id = self.id;
5980 self.send_job(
5981 Some(format!("git remove remote {remote_name}").into()),
5982 move |repo, _cx| async move {
5983 match repo {
5984 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5985 backend.remove_remote(remote_name).await
5986 }
5987 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5988 client
5989 .request(proto::GitRemoveRemote {
5990 project_id: project_id.0,
5991 repository_id: id.to_proto(),
5992 remote_name,
5993 })
5994 .await?;
5995
5996 Ok(())
5997 }
5998 }
5999 },
6000 )
6001 }
6002
6003 pub fn get_remotes(
6004 &mut self,
6005 branch_name: Option<String>,
6006 is_push: bool,
6007 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
6008 let id = self.id;
6009 self.send_job(None, move |repo, _cx| async move {
6010 match repo {
6011 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6012 let remote = if let Some(branch_name) = branch_name {
6013 if is_push {
6014 backend.get_push_remote(branch_name).await?
6015 } else {
6016 backend.get_branch_remote(branch_name).await?
6017 }
6018 } else {
6019 None
6020 };
6021
6022 match remote {
6023 Some(remote) => Ok(vec![remote]),
6024 None => backend.get_all_remotes().await,
6025 }
6026 }
6027 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6028 let response = client
6029 .request(proto::GetRemotes {
6030 project_id: project_id.0,
6031 repository_id: id.to_proto(),
6032 branch_name,
6033 is_push,
6034 })
6035 .await?;
6036
6037 let remotes = response
6038 .remotes
6039 .into_iter()
6040 .map(|remotes| Remote {
6041 name: remotes.name.into(),
6042 })
6043 .collect();
6044
6045 Ok(remotes)
6046 }
6047 }
6048 })
6049 }
6050
    /// Lists all branches of the repository, locally via the backend or
    /// remotely via a `GitGetBranches` request.
    pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.branches().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetBranches {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|branch| proto_to_branch(&branch))
                        .collect();

                    Ok(branches)
                }
            }
        })
    }
6077
6078 /// If this is a linked worktree (*NOT* the main checkout of a repository),
6079 /// returns the pathed for the linked worktree.
6080 ///
6081 /// Returns None if this is the main checkout.
6082 pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
6083 if self.work_directory_abs_path != self.original_repo_abs_path {
6084 Some(&self.work_directory_abs_path)
6085 } else {
6086 None
6087 }
6088 }
6089
6090 pub fn path_for_new_linked_worktree(
6091 &self,
6092 branch_name: &str,
6093 worktree_directory_setting: &str,
6094 ) -> Result<PathBuf> {
6095 let original_repo = self.original_repo_abs_path.clone();
6096 let project_name = original_repo
6097 .file_name()
6098 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
6099 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
6100 Ok(directory.join(branch_name).join(project_name))
6101 }
6102
    /// Lists the repository's git worktrees, locally via the backend or
    /// remotely via a `GitGetWorktrees` request.
    pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.worktrees().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetWorktrees {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let worktrees = response
                        .worktrees
                        .into_iter()
                        .map(|worktree| proto_to_worktree(&worktree))
                        .collect();

                    Ok(worktrees)
                }
            }
        })
    }
6129
    /// Creates a git worktree at `path` for the given target (existing branch,
    /// new branch, or detached at a commit).
    pub fn create_worktree(
        &mut self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let job_description = match target.branch_name() {
            Some(branch_name) => format!("git worktree add: {branch_name}"),
            None => "git worktree add (detached)".to_string(),
        };
        self.send_job(Some(job_description.into()), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_worktree(target, path).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Flatten the target enum into the proto request's
                    // (name, commit, use_existing_branch) triple.
                    let (name, commit, use_existing_branch) = match target {
                        CreateWorktreeTarget::ExistingBranch { branch_name } => {
                            (Some(branch_name), None, true)
                        }
                        CreateWorktreeTarget::NewBranch {
                            branch_name,
                            base_sha,
                        } => (Some(branch_name), base_sha, false),
                        CreateWorktreeTarget::Detached { base_sha } => (None, base_sha, false),
                    };

                    client
                        .request(proto::GitCreateWorktree {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            name: name.unwrap_or_default(),
                            directory: path.to_string_lossy().to_string(),
                            commit,
                            use_existing_branch,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6173
6174 pub fn create_worktree_detached(
6175 &mut self,
6176 path: PathBuf,
6177 commit: String,
6178 ) -> oneshot::Receiver<Result<()>> {
6179 self.create_worktree(
6180 CreateWorktreeTarget::Detached {
6181 base_sha: Some(commit),
6182 },
6183 path,
6184 )
6185 }
6186
    /// Checks out `branch_name` inside the worktree at `worktree_path`,
    /// creating the branch first when `create` is true. Local repositories
    /// only; for remote repositories this is a no-op with a warning.
    pub fn checkout_branch_in_worktree(
        &mut self,
        branch_name: String,
        worktree_path: PathBuf,
        create: bool,
    ) -> oneshot::Receiver<Result<()>> {
        let description = if create {
            format!("git checkout -b {branch_name}")
        } else {
            format!("git checkout {branch_name}")
        };
        self.send_job(Some(description.into()), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend
                        .checkout_branch_in_worktree(branch_name, worktree_path, create)
                        .await
                }
                RepositoryState::Remote(_) => {
                    log::warn!("checkout_branch_in_worktree not supported for remote repositories");
                    Ok(())
                }
            }
        })
    }
6212
    /// Returns the SHA of the current HEAD commit, or `None` when there is no
    /// HEAD (e.g. an empty repository).
    pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    Ok(backend.head_sha().await)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetHeadSha {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    Ok(response.sha)
                }
            }
        })
    }
6233
    /// Updates `ref_name` to point at `commit`, or deletes the ref when
    /// `commit` is `None`. Shared implementation behind [`Self::update_ref`]
    /// and [`Self::delete_ref`].
    fn edit_ref(
        &mut self,
        ref_name: String,
        commit: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => match commit {
                    Some(commit) => backend.update_ref(ref_name, commit).await,
                    None => backend.delete_ref(ref_name).await,
                },
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Encode update-vs-delete as the proto request's action variant.
                    let action = match commit {
                        Some(sha) => proto::git_edit_ref::Action::UpdateToCommit(sha),
                        None => {
                            proto::git_edit_ref::Action::Delete(proto::git_edit_ref::DeleteRef {})
                        }
                    };
                    client
                        .request(proto::GitEditRef {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            ref_name,
                            action: Some(action),
                        })
                        .await?;
                    Ok(())
                }
            }
        })
    }
6266
6267 pub fn update_ref(
6268 &mut self,
6269 ref_name: String,
6270 commit: String,
6271 ) -> oneshot::Receiver<Result<()>> {
6272 self.edit_ref(ref_name, Some(commit))
6273 }
6274
    /// Deletes `ref_name` from the repository.
    pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
        self.edit_ref(ref_name, None)
    }
6278
    /// Repairs worktree administrative files (`git worktree repair`), locally
    /// or via a `GitRepairWorktrees` request.
    pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.repair_worktrees().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitRepairWorktrees {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;
                    Ok(())
                }
            }
        })
    }
6298
    /// Creates an archive checkpoint of the repository's current state,
    /// returning the `(staged, unstaged)` commit SHAs that capture it.
    pub fn create_archive_checkpoint(&mut self) -> oneshot::Receiver<Result<(String, String)>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_archive_checkpoint().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitCreateArchiveCheckpoint {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;
                    Ok((response.staged_commit_sha, response.unstaged_commit_sha))
                }
            }
        })
    }
6318
    /// Restores the repository to a checkpoint previously produced by
    /// [`Self::create_archive_checkpoint`], identified by its staged and
    /// unstaged commit SHAs.
    pub fn restore_archive_checkpoint(
        &mut self,
        staged_sha: String,
        unstaged_sha: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend
                        .restore_archive_checkpoint(staged_sha, unstaged_sha)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitRestoreArchiveCheckpoint {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            staged_commit_sha: staged_sha,
                            unstaged_commit_sha: unstaged_sha,
                        })
                        .await?;
                    Ok(())
                }
            }
        })
    }
6346
    /// Removes the worktree at `path` (`git worktree remove`); `force` removes
    /// it even when it is dirty or locked.
    pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree remove: {}", path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.remove_worktree(path, force).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRemoveWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_string_lossy().to_string(),
                                force,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6372
    /// Moves the worktree at `old_path` to `new_path` (`git worktree move`).
    pub fn rename_worktree(
        &mut self,
        old_path: PathBuf,
        new_path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git worktree move: {}", old_path.display()).into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.rename_worktree(old_path, new_path).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRenameWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                old_path: old_path.to_string_lossy().to_string(),
                                new_path: new_path.to_string_lossy().to_string(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6402
    /// Returns the repository's default branch name, if one can be determined;
    /// `include_remote_name` controls whether the remote prefix is included.
    pub fn default_branch(
        &mut self,
        include_remote_name: bool,
    ) -> oneshot::Receiver<Result<Option<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.default_branch(include_remote_name).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GetDefaultBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    anyhow::Ok(response.branch.map(SharedString::from))
                }
            }
        })
    }
6426
    /// Computes a tree-level diff (per-path add/modify/delete statuses) for
    /// the given `diff_type`, either via the local backend or by asking the
    /// remote project host.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Convert proto entries back into `TreeDiffStatus`es.
                    // Entries with a missing/unparsable oid or path are
                    // dropped (logged via `log_err`) rather than failing the
                    // whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6486
6487 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6488 let id = self.id;
6489 self.send_job(None, move |repo, _cx| async move {
6490 match repo {
6491 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6492 backend.diff(diff_type).await
6493 }
6494 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6495 let (proto_diff_type, merge_base_ref) = match &diff_type {
6496 DiffType::HeadToIndex => {
6497 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6498 }
6499 DiffType::HeadToWorktree => {
6500 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6501 }
6502 DiffType::MergeBase { base_ref } => (
6503 proto::git_diff::DiffType::MergeBase.into(),
6504 Some(base_ref.to_string()),
6505 ),
6506 };
6507 let response = client
6508 .request(proto::GitDiff {
6509 project_id: project_id.0,
6510 repository_id: id.to_proto(),
6511 diff_type: proto_diff_type,
6512 merge_base_ref,
6513 })
6514 .await?;
6515
6516 Ok(response.diff)
6517 }
6518 }
6519 })
6520 }
6521
    /// Creates and switches to a new branch (`git switch -c`), optionally
    /// starting from `base_branch`.
    ///
    /// NOTE(review): in the remote case the request only carries
    /// `branch_name`; `base_branch` is silently dropped. Confirm whether
    /// `proto::GitCreateBranch` can carry a base ref.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6552
6553 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6554 let id = self.id;
6555 self.send_job(
6556 Some(format!("git switch {branch_name}").into()),
6557 move |repo, _cx| async move {
6558 match repo {
6559 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6560 backend.change_branch(branch_name).await
6561 }
6562 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6563 client
6564 .request(proto::GitChangeBranch {
6565 project_id: project_id.0,
6566 repository_id: id.to_proto(),
6567 branch_name,
6568 })
6569 .await?;
6570
6571 Ok(())
6572 }
6573 }
6574 },
6575 )
6576 }
6577
6578 pub fn delete_branch(
6579 &mut self,
6580 is_remote: bool,
6581 branch_name: String,
6582 ) -> oneshot::Receiver<Result<()>> {
6583 let id = self.id;
6584 self.send_job(
6585 Some(
6586 format!(
6587 "git branch {} {}",
6588 if is_remote { "-dr" } else { "-d" },
6589 branch_name
6590 )
6591 .into(),
6592 ),
6593 move |repo, _cx| async move {
6594 match repo {
6595 RepositoryState::Local(state) => {
6596 state.backend.delete_branch(is_remote, branch_name).await
6597 }
6598 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6599 client
6600 .request(proto::GitDeleteBranch {
6601 project_id: project_id.0,
6602 repository_id: id.to_proto(),
6603 is_remote,
6604 branch_name,
6605 })
6606 .await?;
6607
6608 Ok(())
6609 }
6610 }
6611 },
6612 )
6613 }
6614
6615 pub fn rename_branch(
6616 &mut self,
6617 branch: String,
6618 new_name: String,
6619 ) -> oneshot::Receiver<Result<()>> {
6620 let id = self.id;
6621 self.send_job(
6622 Some(format!("git branch -m {branch} {new_name}").into()),
6623 move |repo, _cx| async move {
6624 match repo {
6625 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6626 backend.rename_branch(branch, new_name).await
6627 }
6628 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6629 client
6630 .request(proto::GitRenameBranch {
6631 project_id: project_id.0,
6632 repository_id: id.to_proto(),
6633 branch,
6634 new_name,
6635 })
6636 .await?;
6637
6638 Ok(())
6639 }
6640 }
6641 },
6642 )
6643 }
6644
6645 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6646 let id = self.id;
6647 self.send_job(None, move |repo, _cx| async move {
6648 match repo {
6649 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6650 backend.check_for_pushed_commit().await
6651 }
6652 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6653 let response = client
6654 .request(proto::CheckForPushedCommits {
6655 project_id: project_id.0,
6656 repository_id: id.to_proto(),
6657 })
6658 .await?;
6659
6660 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6661
6662 Ok(branches)
6663 }
6664 }
6665 })
6666 }
6667
6668 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6669 let id = self.id;
6670 self.send_job(None, move |repo, _cx| async move {
6671 match repo {
6672 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6673 backend.checkpoint().await
6674 }
6675 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6676 let response = client
6677 .request(proto::GitCreateCheckpoint {
6678 project_id: project_id.0,
6679 repository_id: id.to_proto(),
6680 })
6681 .await?;
6682
6683 Ok(GitRepositoryCheckpoint {
6684 commit_sha: Oid::from_bytes(&response.commit_sha)?,
6685 })
6686 }
6687 }
6688 })
6689 }
6690
6691 pub fn restore_checkpoint(
6692 &mut self,
6693 checkpoint: GitRepositoryCheckpoint,
6694 ) -> oneshot::Receiver<Result<()>> {
6695 let id = self.id;
6696 self.send_job(None, move |repo, _cx| async move {
6697 match repo {
6698 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6699 backend.restore_checkpoint(checkpoint).await
6700 }
6701 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6702 client
6703 .request(proto::GitRestoreCheckpoint {
6704 project_id: project_id.0,
6705 repository_id: id.to_proto(),
6706 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6707 })
6708 .await?;
6709 Ok(())
6710 }
6711 }
6712 })
6713 }
6714
    /// Applies a `proto::UpdateRepository` message received from the upstream
    /// (host) project to this replica's snapshot, emitting the matching
    /// `RepositoryEvent` for each piece of state that actually changed.
    ///
    /// Each section follows the same pattern: build the new value, compare
    /// against the current snapshot, emit if different, then assign.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch + HEAD commit.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);

        // Stash entries; unparsable proto entries are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;

        // Linked git worktrees.
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // File status edits: removals first, then insertions; entries whose
        // paths/statuses fail to deserialize are dropped (logged via log_err).
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only adopt the scan id once the final message of a batch arrives.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6798
6799 pub fn compare_checkpoints(
6800 &mut self,
6801 left: GitRepositoryCheckpoint,
6802 right: GitRepositoryCheckpoint,
6803 ) -> oneshot::Receiver<Result<bool>> {
6804 let id = self.id;
6805 self.send_job(None, move |repo, _cx| async move {
6806 match repo {
6807 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6808 backend.compare_checkpoints(left, right).await
6809 }
6810 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6811 let response = client
6812 .request(proto::GitCompareCheckpoints {
6813 project_id: project_id.0,
6814 repository_id: id.to_proto(),
6815 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6816 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6817 })
6818 .await?;
6819 Ok(response.equal)
6820 }
6821 }
6822 })
6823 }
6824
6825 pub fn diff_checkpoints(
6826 &mut self,
6827 base_checkpoint: GitRepositoryCheckpoint,
6828 target_checkpoint: GitRepositoryCheckpoint,
6829 ) -> oneshot::Receiver<Result<String>> {
6830 let id = self.id;
6831 self.send_job(None, move |repo, _cx| async move {
6832 match repo {
6833 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6834 backend
6835 .diff_checkpoints(base_checkpoint, target_checkpoint)
6836 .await
6837 }
6838 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6839 let response = client
6840 .request(proto::GitDiffCheckpoints {
6841 project_id: project_id.0,
6842 repository_id: id.to_proto(),
6843 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
6844 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
6845 })
6846 .await?;
6847 Ok(response.diff)
6848 }
6849 }
6850 })
6851 }
6852
6853 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
6854 let updated = SumTree::from_iter(
6855 self.pending_ops.iter().filter_map(|ops| {
6856 let inner_ops: Vec<PendingOp> =
6857 ops.ops.iter().filter(|op| op.running()).cloned().collect();
6858 if inner_ops.is_empty() {
6859 None
6860 } else {
6861 Some(PendingOps {
6862 repo_path: ops.repo_path.clone(),
6863 ops: inner_ops,
6864 })
6865 }
6866 }),
6867 (),
6868 );
6869
6870 if updated != self.pending_ops {
6871 cx.emit(RepositoryEvent::PendingOpsChanged {
6872 pending_ops: self.pending_ops.clone(),
6873 })
6874 }
6875
6876 self.pending_ops = updated;
6877 }
6878
    /// Schedules a full git state re-scan as a keyed job
    /// (`GitJobKey::ReloadGitState`) so that redundant queued scans collapse
    /// into the most recently enqueued one. When the scan finishes, the fresh
    /// snapshot is forwarded downstream via `updates_tx`, if provided.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // If the repository entity is gone, there's nothing to scan.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans only make sense against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // NOTE(review): the result of this `update` appears to be
                // discarded — on failure, pending ops would silently not be
                // cleared; confirm that's intentional.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6910
    /// Spawns the background task that services git jobs for a local
    /// repository, returning the sender used to enqueue [`GitJob`]s.
    ///
    /// Jobs run one at a time in queue order, except that a keyed job is
    /// skipped when another job with the same key is already waiting behind
    /// it — only the most recently enqueued job per key actually runs.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the backend to finish initializing before running jobs.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything immediately available so we can look ahead
                // in the queue when deduplicating keyed jobs.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        // A newer job with the same key supersedes this one.
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed and queue drained: shut down the worker.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6956
    /// Spawns the background task that services git jobs for a remote
    /// repository. Mirrors [`Self::spawn_local_git_worker`]'s job loop,
    /// minus backend initialization and hosting-provider registration.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything immediately available so we can look ahead
                // in the queue when deduplicating keyed jobs.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        // A newer job with the same key supersedes this one.
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed and queue drained: shut down the worker.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6992
6993 fn load_staged_text(
6994 &mut self,
6995 buffer_id: BufferId,
6996 repo_path: RepoPath,
6997 cx: &App,
6998 ) -> Task<Result<Option<String>>> {
6999 let rx = self.send_job(None, move |state, _| async move {
7000 match state {
7001 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7002 anyhow::Ok(backend.load_index_text(repo_path).await)
7003 }
7004 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7005 let response = client
7006 .request(proto::OpenUnstagedDiff {
7007 project_id: project_id.to_proto(),
7008 buffer_id: buffer_id.to_proto(),
7009 })
7010 .await?;
7011 Ok(response.staged_text)
7012 }
7013 }
7014 });
7015 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
7016 }
7017
    /// Loads both the HEAD ("committed") and index ("staged") text for the
    /// buffer's path, collapsing them into one shared base when identical.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // If index and HEAD agree, a single shared base suffices.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    // The host reports whether index == HEAD, so the texts
                    // don't need to be compared (or both transmitted) here.
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        // Flatten the oneshot + inner Result into a single Task result.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
7063
7064 pub fn load_commit_template_text(
7065 &mut self,
7066 ) -> oneshot::Receiver<Result<Option<GitCommitTemplate>>> {
7067 self.send_job(None, move |git_repo, _cx| async move {
7068 match git_repo {
7069 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7070 backend.load_commit_template().await
7071 }
7072 RepositoryState::Remote(_) => Ok(None),
7073 }
7074 })
7075 }
7076
7077 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
7078 let repository_id = self.snapshot.id;
7079 let rx = self.send_job(None, move |state, _| async move {
7080 match state {
7081 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7082 backend.load_blob_content(oid).await
7083 }
7084 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
7085 let response = client
7086 .request(proto::GetBlobContent {
7087 project_id: project_id.to_proto(),
7088 repository_id: repository_id.0,
7089 oid: oid.to_string(),
7090 })
7091 .await?;
7092 Ok(response.content)
7093 }
7094 }
7095 });
7096 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
7097 }
7098
    /// Queues a git status refresh for the given `paths`.
    ///
    /// Paths are batched into `paths_needing_status_update` and processed by
    /// a keyed job (`GitJobKey::RefreshStatuses`), so rapid successive calls
    /// collapse into a single scan. Emits `RepositoryEvent`s for whatever
    /// actually changed and forwards the updated snapshot via `updates_tx`.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of all batched paths; if another refresh
                // already consumed them, the empty check below bails out.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten and deduplicate the batched path lists.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        // Run `status` and `diff_stat` concurrently; with no
                        // HEAD commit there is nothing to diff against.
                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        // Build minimal edits against the previous snapshot:
                        // only insert entries whose status or diff stat
                        // actually differs from what we already have.
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Requested paths git no longer reports a status for
                        // are removed from the snapshot (when present).
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
7211
7212 /// currently running git command and when it started
7213 pub fn current_job(&self) -> Option<JobInfo> {
7214 self.active_jobs.values().next().cloned()
7215 }
7216
    /// Enqueues a no-op job; the returned receiver fires once every job
    /// queued ahead of it has been processed, making this usable as a
    /// flush/sync point for the git worker.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
7220
    /// Runs `f` while tracking a pending op (with the given `git_status`) for
    /// each of `paths`, so in-flight git operations are observable.
    ///
    /// When `f` resolves, each op's status is set to `Finished`, `Skipped`
    /// (on cancellation), or `Error`. Cancellation is reported to the caller
    /// as `Ok(())` rather than an error.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        // Register a `Running` op per path before starting the work.
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                // A canceled job is not an error from the caller's view.
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Write the final job status back into each tracked op.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7260
7261 fn new_pending_ops_for_paths(
7262 &mut self,
7263 paths: Vec<RepoPath>,
7264 git_status: pending_op::GitStatus,
7265 ) -> Vec<(PendingOpId, RepoPath)> {
7266 let mut edits = Vec::with_capacity(paths.len());
7267 let mut ids = Vec::with_capacity(paths.len());
7268 for path in paths {
7269 let mut ops = self
7270 .pending_ops
7271 .get(&PathKey(path.as_ref().clone()), ())
7272 .cloned()
7273 .unwrap_or_else(|| PendingOps::new(&path));
7274 let id = ops.max_id() + 1;
7275 ops.ops.push(PendingOp {
7276 id,
7277 git_status,
7278 job_status: pending_op::JobStatus::Running,
7279 });
7280 edits.push(sum_tree::Edit::Insert(ops));
7281 ids.push((id, path));
7282 }
7283 self.pending_ops.edit(edits, ());
7284 ids
7285 }
7286 pub fn default_remote_url(&self) -> Option<String> {
7287 self.remote_upstream_url
7288 .clone()
7289 .or(self.remote_origin_url.clone())
7290 }
7291}
7292
7293/// If `path` is a git linked worktree checkout, resolves it to the main
7294/// repository's working directory path. Returns `None` if `path` is a normal
7295/// repository, not a git repo, or if resolution fails.
7296///
7297/// Resolution works by:
7298/// 1. Reading the `.git` file to get the `gitdir:` pointer
7299/// 2. Following that to the worktree-specific git directory
7300/// 3. Reading the `commondir` file to find the shared `.git` directory
7301/// 4. Deriving the main repo's working directory from the common dir
7302pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7303 let dot_git = path.join(".git");
7304 let metadata = fs.metadata(&dot_git).await.ok()??;
7305 if metadata.is_dir {
7306 return None; // Normal repo, not a linked worktree
7307 }
7308 // It's a .git file — parse the gitdir: pointer
7309 let content = fs.load(&dot_git).await.ok()?;
7310 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7311 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7312 // Read commondir to find the main .git directory
7313 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7314 let common_dir = fs
7315 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7316 .await
7317 .ok()?;
7318 git::repository::original_repo_path_from_common_dir(&common_dir)
7319}
7320
/// Validates that the resolved worktree directory is acceptable:
/// - The setting must not be an absolute path.
/// - The resolved path must be either a subdirectory of the working
///   directory or a subdirectory of its parent (i.e., a sibling).
///
/// Returns `Ok(resolved_path)` or an error with a user-facing message.
pub fn worktrees_directory_for_repo(
    original_repo_abs_path: &Path,
    worktree_directory_setting: &str,
) -> Result<PathBuf> {
    // Check the original setting before trimming, since a path like "///"
    // is absolute but becomes "" after stripping trailing separators.
    // Also check for leading `/` or `\` explicitly, because on Windows
    // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
    // would slip through even though it's clearly not a relative path.
    if Path::new(worktree_directory_setting).is_absolute()
        || worktree_directory_setting.starts_with('/')
        || worktree_directory_setting.starts_with('\\')
    {
        anyhow::bail!(
            "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
        );
    }

    if worktree_directory_setting.is_empty() {
        anyhow::bail!("git.worktree_directory must not be empty");
    }

    let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
    if trimmed == ".." {
        anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
    }

    let joined = original_repo_abs_path.join(trimmed);
    let resolved = util::normalize_path(&joined);
    let resolved = if resolved.starts_with(original_repo_abs_path) {
        resolved
    } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
        // The setting points outside the repo (e.g. "../worktrees"): append
        // the repo's directory name — presumably so multiple repos sharing
        // one sibling worktree area don't collide. TODO confirm intent.
        resolved.join(repo_dir_name)
    } else {
        resolved
    };

    let parent = original_repo_abs_path
        .parent()
        .unwrap_or(original_repo_abs_path);

    // Final containment check: the resolved directory must stay within the
    // repo's parent directory.
    if !resolved.starts_with(parent) {
        anyhow::bail!(
            "git.worktree_directory resolved to {resolved:?}, which is outside \
             the project root and its parent directory. It must resolve to a \
             subdirectory of {original_repo_abs_path:?} or a sibling of it."
        );
    }

    Ok(resolved)
}
7378
7379/// Returns a short name for a linked worktree suitable for UI display
7380///
7381/// Uses the main worktree path to come up with a short name that disambiguates
7382/// the linked worktree from the main worktree.
7383pub fn linked_worktree_short_name(
7384 main_worktree_path: &Path,
7385 linked_worktree_path: &Path,
7386) -> Option<SharedString> {
7387 if main_worktree_path == linked_worktree_path {
7388 return None;
7389 }
7390
7391 let project_name = main_worktree_path.file_name()?.to_str()?;
7392 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7393 let name = if directory_name != project_name {
7394 directory_name.to_string()
7395 } else {
7396 linked_worktree_path
7397 .parent()?
7398 .file_name()?
7399 .to_str()?
7400 .to_string()
7401 };
7402 Some(name.into())
7403}
7404
/// Builds a permalink for a file inside a vendored crate checkout (the cargo
/// registry `src` directory).
///
/// Works by locating the `.cargo_vcs_info.json` that cargo embeds in
/// published crates — recording the exact git sha and the crate's path within
/// the upstream repository — then reading the repository URL from the
/// adjacent `Cargo.toml` and asking the matching hosting provider to build
/// the permalink for `selection` within the file.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal serde mirrors of the json/toml fields we need.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to find the crate root (the directory that
    // contains `.cargo_vcs_info.json`).
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Path of the file relative to the upstream repository root.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7455
7456fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7457 let Some(blame) = blame else {
7458 return proto::BlameBufferResponse {
7459 blame_response: None,
7460 };
7461 };
7462
7463 let entries = blame
7464 .entries
7465 .into_iter()
7466 .map(|entry| proto::BlameEntry {
7467 sha: entry.sha.as_bytes().into(),
7468 start_line: entry.range.start,
7469 end_line: entry.range.end,
7470 original_line_number: entry.original_line_number,
7471 author: entry.author,
7472 author_mail: entry.author_mail,
7473 author_time: entry.author_time,
7474 author_tz: entry.author_tz,
7475 committer: entry.committer_name,
7476 committer_mail: entry.committer_email,
7477 committer_time: entry.committer_time,
7478 committer_tz: entry.committer_tz,
7479 summary: entry.summary,
7480 previous: entry.previous,
7481 filename: entry.filename,
7482 })
7483 .collect::<Vec<_>>();
7484
7485 let messages = blame
7486 .messages
7487 .into_iter()
7488 .map(|(oid, message)| proto::CommitMessage {
7489 oid: oid.as_bytes().into(),
7490 message,
7491 })
7492 .collect::<Vec<_>>();
7493
7494 proto::BlameBufferResponse {
7495 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7496 }
7497}
7498
7499fn deserialize_blame_buffer_response(
7500 response: proto::BlameBufferResponse,
7501) -> Option<git::blame::Blame> {
7502 let response = response.blame_response?;
7503 let entries = response
7504 .entries
7505 .into_iter()
7506 .filter_map(|entry| {
7507 Some(git::blame::BlameEntry {
7508 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7509 range: entry.start_line..entry.end_line,
7510 original_line_number: entry.original_line_number,
7511 committer_name: entry.committer,
7512 committer_time: entry.committer_time,
7513 committer_tz: entry.committer_tz,
7514 committer_email: entry.committer_mail,
7515 author: entry.author,
7516 author_mail: entry.author_mail,
7517 author_time: entry.author_time,
7518 author_tz: entry.author_tz,
7519 summary: entry.summary,
7520 previous: entry.previous,
7521 filename: entry.filename,
7522 })
7523 })
7524 .collect::<Vec<_>>();
7525
7526 let messages = response
7527 .messages
7528 .into_iter()
7529 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7530 .collect::<HashMap<_, _>>();
7531
7532 Some(Blame { entries, messages })
7533}
7534
7535fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7536 proto::Branch {
7537 is_head: branch.is_head,
7538 ref_name: branch.ref_name.to_string(),
7539 unix_timestamp: branch
7540 .most_recent_commit
7541 .as_ref()
7542 .map(|commit| commit.commit_timestamp as u64),
7543 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7544 ref_name: upstream.ref_name.to_string(),
7545 tracking: upstream
7546 .tracking
7547 .status()
7548 .map(|upstream| proto::UpstreamTracking {
7549 ahead: upstream.ahead as u64,
7550 behind: upstream.behind as u64,
7551 }),
7552 }),
7553 most_recent_commit: branch
7554 .most_recent_commit
7555 .as_ref()
7556 .map(|commit| proto::CommitSummary {
7557 sha: commit.sha.to_string(),
7558 subject: commit.subject.to_string(),
7559 commit_timestamp: commit.commit_timestamp,
7560 author_name: commit.author_name.to_string(),
7561 }),
7562 }
7563}
7564
7565fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7566 proto::Worktree {
7567 path: worktree.path.to_string_lossy().to_string(),
7568 ref_name: worktree
7569 .ref_name
7570 .as_ref()
7571 .map(|s| s.to_string())
7572 .unwrap_or_default(),
7573 sha: worktree.sha.to_string(),
7574 is_main: worktree.is_main,
7575 is_bare: worktree.is_bare,
7576 }
7577}
7578
7579fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7580 git::repository::Worktree {
7581 path: PathBuf::from(proto.path.clone()),
7582 ref_name: if proto.ref_name.is_empty() {
7583 None
7584 } else {
7585 Some(SharedString::from(&proto.ref_name))
7586 },
7587 sha: proto.sha.clone().into(),
7588 is_main: proto.is_main,
7589 is_bare: proto.is_bare,
7590 }
7591}
7592
7593fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7594 git::repository::Branch {
7595 is_head: proto.is_head,
7596 ref_name: proto.ref_name.clone().into(),
7597 upstream: proto
7598 .upstream
7599 .as_ref()
7600 .map(|upstream| git::repository::Upstream {
7601 ref_name: upstream.ref_name.to_string().into(),
7602 tracking: upstream
7603 .tracking
7604 .as_ref()
7605 .map(|tracking| {
7606 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7607 ahead: tracking.ahead as u32,
7608 behind: tracking.behind as u32,
7609 })
7610 })
7611 .unwrap_or(git::repository::UpstreamTracking::Gone),
7612 }),
7613 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7614 git::repository::CommitSummary {
7615 sha: commit.sha.to_string().into(),
7616 subject: commit.subject.to_string().into(),
7617 commit_timestamp: commit.commit_timestamp,
7618 author_name: commit.author_name.to_string().into(),
7619 has_parent: true,
7620 }
7621 }),
7622 }
7623}
7624
7625fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7626 proto::GitCommitDetails {
7627 sha: commit.sha.to_string(),
7628 message: commit.message.to_string(),
7629 commit_timestamp: commit.commit_timestamp,
7630 author_email: commit.author_email.to_string(),
7631 author_name: commit.author_name.to_string(),
7632 }
7633}
7634
7635fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7636 CommitDetails {
7637 sha: proto.sha.clone().into(),
7638 message: proto.message.clone().into(),
7639 commit_timestamp: proto.commit_timestamp,
7640 author_email: proto.author_email.clone().into(),
7641 author_name: proto.author_name.clone().into(),
7642 }
7643}
7644
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Capture the inputs we need from the entity up front. `prev_snapshot` is
    // the base that unchanged fields are carried over from (`..prev_snapshot`
    // below). Clearing `paths_needing_status_update` here marks those paths as
    // handled, since this full scan will recompute every status anyway.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolve the current HEAD commit, if any. A failure to `show` the
    // resolved SHA is logged (`log_err`) and treated the same as having no
    // HEAD commit, rather than failing the whole snapshot.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Phase 1 (background): branches, HEAD commit, and the worktree list,
    // fetched concurrently; any failure aborts the snapshot.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // The main working directory is also reported by the backend's worktree
    // listing; filter it out so only linked (secondary) worktrees remain.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    // Remote URL lookups are allowed to fail individually: `join` (not
    // `try_join`) preserves whatever each lookup produced.
    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // Publish the head/branch/remote/worktree state immediately — before the
    // slower file-status scan below — so the UI can react sooner.
    let snapshot = this.update(cx, |this, cx| {
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            // First of two scan-id bumps per compute (one per publish phase).
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Phase 2 (background): statuses, diff stats, and stash entries. Diff
    // stats are skipped (empty) when there is no HEAD commit to diff against.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Index diff stats by path for O(1) lookup while building status entries,
    // and collect conflicted paths for the merge-state update below.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Refresh merge/conflict details against the backend; `update` reports
    // whether the conflict set actually changed.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Second publish phase: file statuses, merge state, and stash entries.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        // Second scan-id bump of this compute, covering the phase-2 fields.
        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7814
7815fn status_from_proto(
7816 simple_status: i32,
7817 status: Option<proto::GitFileStatus>,
7818) -> anyhow::Result<FileStatus> {
7819 use proto::git_file_status::Variant;
7820
7821 let Some(variant) = status.and_then(|status| status.variant) else {
7822 let code = proto::GitStatus::from_i32(simple_status)
7823 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7824 let result = match code {
7825 proto::GitStatus::Added => TrackedStatus {
7826 worktree_status: StatusCode::Added,
7827 index_status: StatusCode::Unmodified,
7828 }
7829 .into(),
7830 proto::GitStatus::Modified => TrackedStatus {
7831 worktree_status: StatusCode::Modified,
7832 index_status: StatusCode::Unmodified,
7833 }
7834 .into(),
7835 proto::GitStatus::Conflict => UnmergedStatus {
7836 first_head: UnmergedStatusCode::Updated,
7837 second_head: UnmergedStatusCode::Updated,
7838 }
7839 .into(),
7840 proto::GitStatus::Deleted => TrackedStatus {
7841 worktree_status: StatusCode::Deleted,
7842 index_status: StatusCode::Unmodified,
7843 }
7844 .into(),
7845 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7846 };
7847 return Ok(result);
7848 };
7849
7850 let result = match variant {
7851 Variant::Untracked(_) => FileStatus::Untracked,
7852 Variant::Ignored(_) => FileStatus::Ignored,
7853 Variant::Unmerged(unmerged) => {
7854 let [first_head, second_head] =
7855 [unmerged.first_head, unmerged.second_head].map(|head| {
7856 let code = proto::GitStatus::from_i32(head)
7857 .with_context(|| format!("Invalid git status code: {head}"))?;
7858 let result = match code {
7859 proto::GitStatus::Added => UnmergedStatusCode::Added,
7860 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7861 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7862 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7863 };
7864 Ok(result)
7865 });
7866 let [first_head, second_head] = [first_head?, second_head?];
7867 UnmergedStatus {
7868 first_head,
7869 second_head,
7870 }
7871 .into()
7872 }
7873 Variant::Tracked(tracked) => {
7874 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7875 .map(|status| {
7876 let code = proto::GitStatus::from_i32(status)
7877 .with_context(|| format!("Invalid git status code: {status}"))?;
7878 let result = match code {
7879 proto::GitStatus::Modified => StatusCode::Modified,
7880 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7881 proto::GitStatus::Added => StatusCode::Added,
7882 proto::GitStatus::Deleted => StatusCode::Deleted,
7883 proto::GitStatus::Renamed => StatusCode::Renamed,
7884 proto::GitStatus::Copied => StatusCode::Copied,
7885 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7886 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7887 };
7888 Ok(result)
7889 });
7890 let [index_status, worktree_status] = [index_status?, worktree_status?];
7891 TrackedStatus {
7892 index_status,
7893 worktree_status,
7894 }
7895 .into()
7896 }
7897 };
7898 Ok(result)
7899}
7900
7901fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7902 use proto::git_file_status::{Tracked, Unmerged, Variant};
7903
7904 let variant = match status {
7905 FileStatus::Untracked => Variant::Untracked(Default::default()),
7906 FileStatus::Ignored => Variant::Ignored(Default::default()),
7907 FileStatus::Unmerged(UnmergedStatus {
7908 first_head,
7909 second_head,
7910 }) => Variant::Unmerged(Unmerged {
7911 first_head: unmerged_status_to_proto(first_head),
7912 second_head: unmerged_status_to_proto(second_head),
7913 }),
7914 FileStatus::Tracked(TrackedStatus {
7915 index_status,
7916 worktree_status,
7917 }) => Variant::Tracked(Tracked {
7918 index_status: tracked_status_to_proto(index_status),
7919 worktree_status: tracked_status_to_proto(worktree_status),
7920 }),
7921 };
7922 proto::GitFileStatus {
7923 variant: Some(variant),
7924 }
7925}
7926
7927fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7928 match code {
7929 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7930 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7931 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7932 }
7933}
7934
7935fn tracked_status_to_proto(code: StatusCode) -> i32 {
7936 match code {
7937 StatusCode::Added => proto::GitStatus::Added as _,
7938 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7939 StatusCode::Modified => proto::GitStatus::Modified as _,
7940 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7941 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7942 StatusCode::Copied => proto::GitStatus::Copied as _,
7943 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7944 }
7945}