1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 project_settings::ProjectSettings,
10 trusted_worktrees::{
11 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
12 },
13 worktree_store::{WorktreeStore, WorktreeStoreEvent},
14};
15use anyhow::{Context as _, Result, anyhow, bail};
16use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
17use buffer_diff::{BufferDiff, BufferDiffEvent};
18use client::ProjectId;
19use collections::HashMap;
20pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
21use fs::{Fs, RemoveOptions};
22use futures::{
23 FutureExt, StreamExt,
24 channel::{
25 mpsc,
26 oneshot::{self, Canceled},
27 },
28 future::{self, BoxFuture, Shared},
29 stream::FuturesOrdered,
30};
31use git::{
32 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
33 blame::Blame,
34 parse_git_remote_url,
35 repository::{
36 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, CreateWorktreeTarget,
37 DiffType, FetchOptions, GitCommitTemplate, GitRepository, GitRepositoryCheckpoint,
38 GraphCommitData, InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote,
39 RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus,
40 Worktree as GitWorktree,
41 },
42 stash::{GitStash, StashEntry},
43 status::{
44 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
45 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
46 },
47};
48use gpui::{
49 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
50 WeakEntity,
51};
52use language::{
53 Buffer, BufferEvent, Language, LanguageRegistry,
54 proto::{deserialize_version, serialize_version},
55};
56use parking_lot::Mutex;
57use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
58use postage::stream::Stream as _;
59use rpc::{
60 AnyProtoClient, TypedEnvelope,
61 proto::{self, git_reset, split_repository_update},
62};
63use serde::Deserialize;
64use settings::{Settings, WorktreeId};
65use smol::future::yield_now;
66use std::{
67 cmp::Ordering,
68 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
69 future::Future,
70 mem,
71 ops::Range,
72 path::{Path, PathBuf},
73 str::FromStr,
74 sync::{
75 Arc,
76 atomic::{self, AtomicU64},
77 },
78 time::Instant,
79};
80use sum_tree::{Edit, SumTree, TreeMap};
81use task::Shell;
82use text::{Bias, BufferId};
83use util::{
84 ResultExt, debug_panic,
85 paths::{PathStyle, SanitizedPath},
86 post_inc,
87 rel_path::RelPath,
88};
89use worktree::{
90 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
91 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
92};
93use zeroize::Zeroize;
94
/// Tracks all git repositories discovered in a project's worktrees, the
/// per-buffer diff/conflict state, and replication of that state to and
/// from collaborators.
pub struct GitStore {
    /// Whether this store is backed by the local filesystem or an upstream host.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // NOTE(review): appears to map each repository to the worktrees it covers
    // — confirm against the code that populates it.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository currently treated as active, if any.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff-base loads, shared so concurrent requests for the same
    /// `(buffer, kind)` coalesce into a single task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets, base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Strong handles to diffs shared with each collaborator, keeping them
    /// alive while the peer uses them.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
109
/// The diffs for one buffer that have been shared with a remote peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
115
/// Per-buffer git state: diffs against the index, HEAD, or arbitrary
/// commits, conflict-marker tracking, and the bookkeeping needed to
/// recalculate them when the buffer or repository changes.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs against specific commits; a `None` key is a diff with no base text.
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders notified when conflict parsing completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    /// Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    /// Cached base texts for commit-relative diffs, keyed by commit.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
146
/// Describes which diff base texts (index and/or HEAD) changed, and their
/// new contents. `None` means the file is absent from that base.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    SetIndex(Option<String>),
    SetHead(Option<String>),
    /// Index and head changed to different texts.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and head changed to the same text.
    SetBoth(Option<String>),
}
157
/// Which base a buffer diff is computed against.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer vs. the git index.
    Unstaged,
    /// Buffer vs. HEAD.
    Uncommitted,
    /// Buffer vs. a specific commit; `None` means no base text.
    SinceOid(Option<git::Oid>),
}
164
/// Where this store's authoritative git data lives: on this machine
/// (`Local`) or on a host reached over RPC (`Remote`).
enum GitStoreState {
    Local {
        /// Source of ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is shared with downstream clients.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while re-sharing this remote project downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
178
/// A repository change queued for forwarding to downstream clients.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
183
/// Connection to a downstream client of a locally-hosted project, plus the
/// channel and background task that stream repository updates to it.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    /// Feed of snapshots/removals consumed by `_task`.
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
190
/// A checkpoint across all repositories in the store, keyed by each
/// repository's working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
195
/// The git status of a single file within a repository, with optional
/// added/deleted line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
202
203impl StatusEntry {
204 fn to_proto(&self) -> proto::StatusEntry {
205 let simple_status = match self.status {
206 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
207 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
208 FileStatus::Tracked(TrackedStatus {
209 index_status,
210 worktree_status,
211 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
212 worktree_status
213 } else {
214 index_status
215 }),
216 };
217
218 proto::StatusEntry {
219 repo_path: self.repo_path.to_proto(),
220 simple_status,
221 status: Some(status_to_proto(self.status)),
222 diff_stat_added: self.diff_stat.map(|ds| ds.added),
223 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
224 }
225 }
226}
227
228impl TryFrom<proto::StatusEntry> for StatusEntry {
229 type Error = anyhow::Error;
230
231 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
232 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
233 let status = status_from_proto(value.simple_status, value.status)?;
234 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
235 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
236 _ => None,
237 };
238 Ok(Self {
239 repo_path,
240 status,
241 diff_stat,
242 })
243 }
244}
245
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summarizes this entry for the status tree: the repo-relative path as
    /// the ordering key, plus the aggregated git status.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}
256
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed by their repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
264
/// Uniquely identifies a repository within a `GitStore`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
267
/// State of an in-progress merge, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    // NOTE(review): presumably maps each conflicted path to the merge heads
    // that contributed to its conflict — confirm with the populating code.
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    /// The merge commit message, when one is available.
    pub message: Option<SharedString>,
}
273
/// Whether the full data for a commit has been loaded yet.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
279
/// An immutable view of a repository's state (statuses, branches, head,
/// merge/stash info) that can be cloned, diffed, and replicated to peers.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// File statuses ordered by repo-relative path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    /// The currently checked-out branch, if HEAD points at one.
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    // NOTE(review): presumably incremented on each status scan — confirm.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Linked git worktrees of this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
301
/// Identifier for a git job queued on a repository.
type JobId = u64;
303
/// Metadata about an active git job: when it started and a human-readable
/// description of what it is doing.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
309
/// Background handler that serves commit-data requests for the git graph;
/// requests are submitted as commit oids on `commit_data_request`.
struct GraphCommitDataHandler {
    _task: Task<()>,
    commit_data_request: smol::channel::Sender<Oid>,
}
314
/// Lifecycle of the graph commit-data handler.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
320
/// Commit-graph data being fetched for one `(LogSource, LogOrder)` view,
/// including any error encountered while loading.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index of each commit within `commit_data`, by oid.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
327
/// Borrowed view of the current commit-graph data returned to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    /// True while more commits are still being fetched.
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
333
/// A single git repository tracked by the `GitStore`: its latest snapshot
/// plus the mutable machinery (job queue, askpass delegates, graph caches)
/// used to run operations against it.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    /// Buffer backing the commit-message editor, lazily created.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue of serialized git jobs for this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    /// Next job id to hand out.
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Lazily-resolved backend state (local filesystem or remote client).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
353
/// `Repository` dereferences to its current snapshot so read-only snapshot
/// fields can be accessed directly on the repository.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
361
/// Backend state for a repository on the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    /// Shell environment resolved for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
368
impl LocalRepositoryState {
    /// Opens the git repository at `dot_git_abs_path`, first resolving the
    /// shell environment of `work_directory_abs_path`; the environment's
    /// `PATH` is used to locate the `git` binary.
    ///
    /// Environment resolution failures are logged and degrade to an empty
    /// environment rather than failing the open.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a `git` found on the resolved PATH; fall back to
                    // the process's own PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
411
/// Backend state for a repository hosted by an upstream peer, reached via RPC.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
417
/// Resolved backend for a repository: local filesystem or remote RPC.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
423
/// Progress events for commit-graph loading.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of loaded commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
430
/// Events emitted by a `Repository` when some facet of its state changes.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// A graph-loading event scoped to one `(LogSource, LogOrder)` view.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
441
/// Event signalling that a repository's set of active git jobs changed.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
444
/// Store-level events emitted by `GitStore`.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// Writing a new index text to disk failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
456
// Event wiring: repositories emit repository-level and job events; the store
// emits store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
460
/// A unit of git work to run against the repository's resolved state.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): `key` appears to let the scheduler coalesce or serialize
    // like jobs — confirm with the job-queue implementation.
    key: Option<GitJobKey>,
}
465
/// Identifies a category of git job for deduplication/ordering purposes.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
473
474impl GitStore {
475 pub fn local(
476 worktree_store: &Entity<WorktreeStore>,
477 buffer_store: Entity<BufferStore>,
478 environment: Entity<ProjectEnvironment>,
479 fs: Arc<dyn Fs>,
480 cx: &mut Context<Self>,
481 ) -> Self {
482 Self::new(
483 worktree_store.clone(),
484 buffer_store,
485 GitStoreState::Local {
486 next_repository_id: Arc::new(AtomicU64::new(1)),
487 downstream: None,
488 project_environment: environment,
489 fs,
490 },
491 cx,
492 )
493 }
494
495 pub fn remote(
496 worktree_store: &Entity<WorktreeStore>,
497 buffer_store: Entity<BufferStore>,
498 upstream_client: AnyProtoClient,
499 project_id: u64,
500 cx: &mut Context<Self>,
501 ) -> Self {
502 Self::new(
503 worktree_store.clone(),
504 buffer_store,
505 GitStoreState::Remote {
506 upstream_client,
507 upstream_project_id: project_id,
508 downstream: None,
509 },
510 cx,
511 )
512 }
513
514 fn new(
515 worktree_store: Entity<WorktreeStore>,
516 buffer_store: Entity<BufferStore>,
517 state: GitStoreState,
518 cx: &mut Context<Self>,
519 ) -> Self {
520 let mut _subscriptions = vec![
521 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
522 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
523 ];
524
525 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
526 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
527 }
528
529 GitStore {
530 state,
531 buffer_store,
532 worktree_store,
533 repositories: HashMap::default(),
534 worktree_ids: HashMap::default(),
535 active_repo_id: None,
536 _subscriptions,
537 loading_diffs: HashMap::default(),
538 shared_diffs: HashMap::default(),
539 diffs: HashMap::default(),
540 }
541 }
542
    /// Registers all git-related RPC handlers on the given client so remote
    /// peers can invoke git operations on this store's repositories.
    pub fn init(client: &AnyProtoClient) {
        // Branch and remote management.
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        // Sync, staging, stashing, and committing.
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        // Checkpoints.
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_create_archive_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_archive_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        // Diffs, blame, and history.
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        // Repository lifecycle and git worktrees.
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
        client.add_entity_request_handler(Self::handle_edit_ref);
        client.add_entity_request_handler(Self::handle_repair_worktrees);
    }
599
600 pub fn is_local(&self) -> bool {
601 matches!(self.state, GitStoreState::Local { .. })
602 }
603
604 fn set_active_repo_id(&mut self, repo_id: RepositoryId, cx: &mut Context<Self>) {
605 if self.active_repo_id != Some(repo_id) {
606 self.active_repo_id = Some(repo_id);
607 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
608 }
609 }
610
611 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
612 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
613 self.set_active_repo_id(repo.read(cx).id, cx);
614 }
615 }
616
617 pub fn set_active_repo_for_worktree(
618 &mut self,
619 worktree_id: WorktreeId,
620 cx: &mut Context<Self>,
621 ) {
622 let Some(worktree) = self
623 .worktree_store
624 .read(cx)
625 .worktree_for_id(worktree_id, cx)
626 else {
627 return;
628 };
629 let worktree_abs_path = worktree.read(cx).abs_path();
630 let Some(repo_id) = self
631 .repositories
632 .values()
633 .filter(|repo| {
634 let repo_path = &repo.read(cx).work_directory_abs_path;
635 *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref())
636 })
637 .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
638 .map(|repo| repo.read(cx).id)
639 else {
640 return;
641 };
642
643 self.set_active_repo_id(repo_id, cx);
644 }
645
    /// Begins sharing repository state with a downstream client: sends an
    /// initial snapshot of every repository, then retains the client (and,
    /// for local stores, a streaming task) for future updates.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Forward a full snapshot of each repository to the new client.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // Queue the current snapshots, then spawn a background task
                // that streams updates, diffing each new snapshot against the
                // last one sent for that repository.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send a diff
                                            // against the last sent snapshot.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // First time: send it in full.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The stream ended or a send failed: drop the
                        // downstream client so we stop sharing.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
726
727 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
728 match &mut self.state {
729 GitStoreState::Local {
730 downstream: downstream_client,
731 ..
732 } => {
733 downstream_client.take();
734 }
735 GitStoreState::Remote {
736 downstream: downstream_client,
737 ..
738 } => {
739 downstream_client.take();
740 }
741 }
742 self.shared_diffs.clear();
743 }
744
    /// Releases all diffs retained on behalf of the given peer, e.g. when
    /// that collaborator disconnects.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
748
749 pub fn active_repository(&self) -> Option<Entity<Repository>> {
750 self.active_repo_id
751 .as_ref()
752 .map(|id| self.repositories[id].clone())
753 }
754
    /// Returns the diff between the buffer's content and its staged (index)
    /// text, creating and caching the diff on first request.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: the diff already exists; wait for any in-flight
        // recalculation before handing it out.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Slow path: load the staged text and build the diff, deduplicating
        // concurrent requests through `loading_diffs`.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
809
810 pub fn open_diff_since(
811 &mut self,
812 oid: Option<git::Oid>,
813 buffer: Entity<Buffer>,
814 repo: Entity<Repository>,
815 cx: &mut Context<Self>,
816 ) -> Task<Result<Entity<BufferDiff>>> {
817 let buffer_id = buffer.read(cx).remote_id();
818
819 if let Some(diff_state) = self.diffs.get(&buffer_id)
820 && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
821 {
822 if let Some(task) =
823 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
824 {
825 return cx.background_executor().spawn(async move {
826 task.await;
827 Ok(oid_diff)
828 });
829 }
830 return Task::ready(Ok(oid_diff));
831 }
832
833 let diff_kind = DiffKind::SinceOid(oid);
834 if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
835 let task = task.clone();
836 return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
837 }
838
839 let task = cx
840 .spawn(async move |this, cx| {
841 let result: Result<Entity<BufferDiff>> = async {
842 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
843 let language_registry =
844 buffer.update(cx, |buffer, _| buffer.language_registry());
845 let content: Option<Arc<str>> = match oid {
846 None => None,
847 Some(oid) => Some(
848 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
849 .await?
850 .into(),
851 ),
852 };
853 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));
854
855 buffer_diff
856 .update(cx, |buffer_diff, cx| {
857 buffer_diff.language_changed(
858 buffer_snapshot.language().cloned(),
859 language_registry,
860 cx,
861 );
862 buffer_diff.set_base_text(
863 content.clone(),
864 buffer_snapshot.language().cloned(),
865 buffer_snapshot.text,
866 cx,
867 )
868 })
869 .await?;
870 let unstaged_diff = this
871 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
872 .await?;
873 buffer_diff.update(cx, |buffer_diff, _| {
874 buffer_diff.set_secondary_diff(unstaged_diff);
875 });
876
877 this.update(cx, |this, cx| {
878 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
879 .detach();
880
881 this.loading_diffs.remove(&(buffer_id, diff_kind));
882
883 let git_store = cx.weak_entity();
884 let diff_state = this
885 .diffs
886 .entry(buffer_id)
887 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
888
889 diff_state.update(cx, |state, _| {
890 if let Some(oid) = oid {
891 if let Some(content) = content {
892 state.oid_texts.insert(oid, content);
893 }
894 }
895 state.oid_diffs.insert(oid, buffer_diff.downgrade());
896 });
897 })?;
898
899 Ok(buffer_diff)
900 }
901 .await;
902 result.map_err(Arc::new)
903 })
904 .shared();
905
906 self.loading_diffs
907 .insert((buffer_id, diff_kind), task.clone());
908 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
909 }
910
    /// Returns the diff between the buffer's content and its committed (HEAD)
    /// text, creating and caching the diff on first request; the unstaged
    /// diff is attached as the secondary diff.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: the diff already exists; wait for any in-flight
        // recalculation before handing it out.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Slow path: load the committed text and build the diff, deduplicating
        // concurrent requests through `loading_diffs`.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
963
    /// Shared slow path for `open_unstaged_diff`/`open_uncommitted_diff`:
    /// installs the loaded base texts into the buffer's git state, creates
    /// the `BufferDiff` entity, and waits for the first recalculation.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // If loading the base texts failed, clear the pending-load marker so
        // a later call can retry, then propagate the error.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => {
                        diff_state.unstaged_diff.get_or_insert(diff.downgrade());
                    }
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off the first recalculation with the new base texts,
                // and return a future resolving when it completes.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1040
1041 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1042 let diff_state = self.diffs.get(&buffer_id)?;
1043 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1044 }
1045
1046 pub fn get_uncommitted_diff(
1047 &self,
1048 buffer_id: BufferId,
1049 cx: &App,
1050 ) -> Option<Entity<BufferDiff>> {
1051 let diff_state = self.diffs.get(&buffer_id)?;
1052 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1053 }
1054
1055 pub fn get_diff_since_oid(
1056 &self,
1057 buffer_id: BufferId,
1058 oid: Option<git::Oid>,
1059 cx: &App,
1060 ) -> Option<Entity<BufferDiff>> {
1061 let diff_state = self.diffs.get(&buffer_id)?;
1062 diff_state.read(cx).oid_diff(oid)
1063 }
1064
1065 pub fn open_conflict_set(
1066 &mut self,
1067 buffer: Entity<Buffer>,
1068 cx: &mut Context<Self>,
1069 ) -> Entity<ConflictSet> {
1070 log::debug!("open conflict set");
1071 let buffer_id = buffer.read(cx).remote_id();
1072
1073 if let Some(git_state) = self.diffs.get(&buffer_id)
1074 && let Some(conflict_set) = git_state
1075 .read(cx)
1076 .conflict_set
1077 .as_ref()
1078 .and_then(|weak| weak.upgrade())
1079 {
1080 let conflict_set = conflict_set;
1081 let buffer_snapshot = buffer.read(cx).text_snapshot();
1082
1083 git_state.update(cx, |state, cx| {
1084 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1085 });
1086
1087 return conflict_set;
1088 }
1089
1090 let is_unmerged = self
1091 .repository_and_path_for_buffer_id(buffer_id, cx)
1092 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1093 let git_store = cx.weak_entity();
1094 let buffer_git_state = self
1095 .diffs
1096 .entry(buffer_id)
1097 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1098 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1099
1100 self._subscriptions
1101 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1102 cx.emit(GitStoreEvent::ConflictsUpdated);
1103 }));
1104
1105 buffer_git_state.update(cx, |state, cx| {
1106 state.conflict_set = Some(conflict_set.downgrade());
1107 let buffer_snapshot = buffer.read(cx).text_snapshot();
1108 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1109 });
1110
1111 conflict_set
1112 }
1113
1114 pub fn project_path_git_status(
1115 &self,
1116 project_path: &ProjectPath,
1117 cx: &App,
1118 ) -> Option<FileStatus> {
1119 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1120 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1121 }
1122
    /// Captures a checkpoint of every repository in the store.
    ///
    /// Returns a [`GitStoreCheckpoint`] keyed by each repository's working
    /// directory path, suitable for later `restore_checkpoint` /
    /// `compare_checkpoints` calls. Fails if any single repository fails to
    /// checkpoint.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `map(|checkpoint| checkpoint?)` converts the per-repository
                // error so the futures all yield the same `Result` item type
                // for `try_join_all` below.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        // Await all per-repository checkpoints off the foreground thread.
        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1143
    /// Restores a previously captured [`GitStoreCheckpoint`].
    ///
    /// Each checkpoint entry is matched to a current repository by working
    /// directory path; entries whose repository no longer exists are silently
    /// skipped. Fails if any individual restore fails.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // `restore.await?` flattens the delivery error into the task's error.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1169
1170 /// Compares two checkpoints, returning true if they are equal.
1171 pub fn compare_checkpoints(
1172 &self,
1173 left: GitStoreCheckpoint,
1174 mut right: GitStoreCheckpoint,
1175 cx: &mut App,
1176 ) -> Task<Result<bool>> {
1177 let repositories_by_work_dir_abs_path = self
1178 .repositories
1179 .values()
1180 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1181 .collect::<HashMap<_, _>>();
1182
1183 let mut tasks = Vec::new();
1184 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1185 if let Some(right_checkpoint) = right
1186 .checkpoints_by_work_dir_abs_path
1187 .remove(&work_dir_abs_path)
1188 {
1189 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1190 {
1191 let compare = repository.update(cx, |repository, _| {
1192 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1193 });
1194
1195 tasks.push(async move { compare.await? });
1196 }
1197 } else {
1198 return Task::ready(Ok(false));
1199 }
1200 }
1201 cx.background_spawn(async move {
1202 Ok(future::try_join_all(tasks)
1203 .await?
1204 .into_iter()
1205 .all(|result| result))
1206 })
1207 }
1208
    /// Blames a buffer.
    ///
    /// When `version` is provided, blames the buffer's content as of that
    /// version; otherwise blames the current content. Fails if the buffer
    /// does not belong to any tracked git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle so this task doesn't keep the repository alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                // Local repository: run blame directly against the backend.
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                // Remote repository: ask the host to compute the blame.
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1255
1256 pub fn file_history(
1257 &self,
1258 repo: &Entity<Repository>,
1259 path: RepoPath,
1260 cx: &mut App,
1261 ) -> Task<Result<git::repository::FileHistory>> {
1262 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1263
1264 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1265 }
1266
1267 pub fn file_history_paginated(
1268 &self,
1269 repo: &Entity<Repository>,
1270 path: RepoPath,
1271 skip: usize,
1272 limit: Option<usize>,
1273 cx: &mut App,
1274 ) -> Task<Result<git::repository::FileHistory>> {
1275 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1276
1277 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1278 }
1279
    /// Builds a permalink URL to the given line selection of a buffer on its
    /// git hosting provider.
    ///
    /// When the buffer is outside any git repository, falls back to building a
    /// registry permalink for Rust sources in the Cargo registry; otherwise
    /// fails with "no permalink available".
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; default to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    // Local repository: resolve the remote URL and HEAD SHA,
                    // then let the hosting provider construct the link.
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    // Remote repository: ask the host to compute the permalink.
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1364
1365 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1366 match &self.state {
1367 GitStoreState::Local {
1368 downstream: downstream_client,
1369 ..
1370 } => downstream_client
1371 .as_ref()
1372 .map(|state| (state.client.clone(), state.project_id)),
1373 GitStoreState::Remote {
1374 downstream: downstream_client,
1375 ..
1376 } => downstream_client.clone(),
1377 }
1378 }
1379
1380 fn upstream_client(&self) -> Option<AnyProtoClient> {
1381 match &self.state {
1382 GitStoreState::Local { .. } => None,
1383 GitStoreState::Remote {
1384 upstream_client, ..
1385 } => Some(upstream_client.clone()),
1386 }
1387 }
1388
    /// Reacts to worktree-store notifications (entry updates, git repository
    /// changes, worktree removal) by keeping the repository set in sync.
    ///
    /// Only meaningful for local stores; remote stores return immediately.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by the repository they belong to,
                    // then notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees don't contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Collect repositories that are no longer referenced by any worktree.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // When sharing downstream, tell the remote side to drop it too.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository went away, fall back to any remaining one.
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Propagates a repository's change event, refreshing conflict state for
    /// any open buffers that belong to that repository.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Sync the conflict flag with the repository's unmerged
                        // status; the `?` bails if the conflict set was dropped.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse markers when the flag actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1534
1535 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1536 cx.emit(GitStoreEvent::JobsUpdated)
1537 }
1538
1539 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1540 fn update_repositories_from_worktree(
1541 &mut self,
1542 worktree_id: WorktreeId,
1543 project_environment: Entity<ProjectEnvironment>,
1544 next_repository_id: Arc<AtomicU64>,
1545 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1546 updated_git_repositories: UpdatedGitRepositoriesSet,
1547 fs: Arc<dyn Fs>,
1548 cx: &mut Context<Self>,
1549 ) {
1550 let mut removed_ids = Vec::new();
1551 for update in updated_git_repositories.iter() {
1552 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1553 let existing_work_directory_abs_path =
1554 repo.read(cx).work_directory_abs_path.clone();
1555 Some(&existing_work_directory_abs_path)
1556 == update.old_work_directory_abs_path.as_ref()
1557 || Some(&existing_work_directory_abs_path)
1558 == update.new_work_directory_abs_path.as_ref()
1559 }) {
1560 let repo_id = *id;
1561 if let Some(new_work_directory_abs_path) =
1562 update.new_work_directory_abs_path.clone()
1563 {
1564 self.worktree_ids
1565 .entry(repo_id)
1566 .or_insert_with(HashSet::new)
1567 .insert(worktree_id);
1568 existing.update(cx, |existing, cx| {
1569 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1570 existing.schedule_scan(updates_tx.clone(), cx);
1571 });
1572 } else {
1573 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1574 worktree_ids.remove(&worktree_id);
1575 if worktree_ids.is_empty() {
1576 removed_ids.push(repo_id);
1577 }
1578 }
1579 }
1580 } else if let UpdatedGitRepository {
1581 new_work_directory_abs_path: Some(work_directory_abs_path),
1582 dot_git_abs_path: Some(dot_git_abs_path),
1583 repository_dir_abs_path: Some(repository_dir_abs_path),
1584 common_dir_abs_path: Some(common_dir_abs_path),
1585 ..
1586 } = update
1587 {
1588 let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
1589 work_directory_abs_path,
1590 common_dir_abs_path,
1591 repository_dir_abs_path,
1592 )
1593 .into();
1594 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1595 let is_trusted = TrustedWorktrees::try_get_global(cx)
1596 .map(|trusted_worktrees| {
1597 trusted_worktrees.update(cx, |trusted_worktrees, cx| {
1598 trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
1599 })
1600 })
1601 .unwrap_or(false);
1602 let git_store = cx.weak_entity();
1603 let repo = cx.new(|cx| {
1604 let mut repo = Repository::local(
1605 id,
1606 work_directory_abs_path.clone(),
1607 original_repo_abs_path.clone(),
1608 dot_git_abs_path.clone(),
1609 project_environment.downgrade(),
1610 fs.clone(),
1611 is_trusted,
1612 git_store,
1613 cx,
1614 );
1615 if let Some(updates_tx) = updates_tx.as_ref() {
1616 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1617 updates_tx
1618 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1619 .ok();
1620 }
1621 repo.schedule_scan(updates_tx.clone(), cx);
1622 repo
1623 });
1624 self._subscriptions
1625 .push(cx.subscribe(&repo, Self::on_repository_event));
1626 self._subscriptions
1627 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1628 self.repositories.insert(id, repo);
1629 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1630 cx.emit(GitStoreEvent::RepositoryAdded);
1631 self.active_repo_id.get_or_insert_with(|| {
1632 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1633 id
1634 });
1635 }
1636 }
1637
1638 for id in removed_ids {
1639 if self.active_repo_id == Some(id) {
1640 self.active_repo_id = None;
1641 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1642 }
1643 self.repositories.remove(&id);
1644 if let Some(updates_tx) = updates_tx.as_ref() {
1645 updates_tx
1646 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1647 .ok();
1648 }
1649 }
1650 }
1651
1652 fn on_trusted_worktrees_event(
1653 &mut self,
1654 _: Entity<TrustedWorktreesStore>,
1655 event: &TrustedWorktreesEvent,
1656 cx: &mut Context<Self>,
1657 ) {
1658 if !matches!(self.state, GitStoreState::Local { .. }) {
1659 return;
1660 }
1661
1662 let (is_trusted, event_paths) = match event {
1663 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1664 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1665 };
1666
1667 for (repo_id, worktree_ids) in &self.worktree_ids {
1668 if worktree_ids
1669 .iter()
1670 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1671 {
1672 if let Some(repo) = self.repositories.get(repo_id) {
1673 let repository_state = repo.read(cx).repository_state.clone();
1674 cx.background_spawn(async move {
1675 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1676 state.backend.set_trusted(is_trusted);
1677 }
1678 })
1679 .detach();
1680 }
1681 }
1682 }
1683 }
1684
    /// Keeps per-buffer git state in sync with buffer-store lifecycle events.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch for language changes so diff state can follow suit.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Drop all diff state associated with the closed buffer.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        // Best-effort: failures are logged rather than surfaced.
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1757
    /// Recalculates diffs and reparses conflict markers for the given buffers.
    ///
    /// Buffers without tracked git state are skipped. Returns a future that
    /// resolves once all triggered recalculations and conflict reparses have
    /// completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` returns `None` when no
                    // recalculation was started, so `extend` adds 0 or 1 futures.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1783
    /// Responds to hunks being staged or unstaged in a buffer diff by writing
    /// the new index text to the buffer's repository.
    ///
    /// On write failure, pending hunk state is cleared and an
    /// `IndexWriteError` event is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Number each staging operation so stale results can be detected.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Outer `Ok` means the job completed; the inner `Err`
                        // is the actual index-write failure.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1823
1824 fn local_worktree_git_repos_changed(
1825 &mut self,
1826 worktree: Entity<Worktree>,
1827 changed_repos: &UpdatedGitRepositoriesSet,
1828 cx: &mut Context<Self>,
1829 ) {
1830 log::debug!("local worktree repos changed");
1831 debug_assert!(worktree.read(cx).is_local());
1832
1833 for repository in self.repositories.values() {
1834 repository.update(cx, |repository, cx| {
1835 let repo_abs_path = &repository.work_directory_abs_path;
1836 if changed_repos.iter().any(|update| {
1837 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1838 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1839 }) {
1840 repository.reload_buffer_diff_bases(cx);
1841 }
1842 });
1843 }
1844 }
1845
    /// All repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1849
1850 /// Returns the original (main) repository working directory for the given worktree.
1851 /// For normal checkouts this equals the worktree's own path; for linked
1852 /// worktrees it points back to the original repo.
1853 pub fn original_repo_path_for_worktree(
1854 &self,
1855 worktree_id: WorktreeId,
1856 cx: &App,
1857 ) -> Option<Arc<Path>> {
1858 self.active_repo_id
1859 .iter()
1860 .chain(self.worktree_ids.keys())
1861 .find(|repo_id| {
1862 self.worktree_ids
1863 .get(repo_id)
1864 .is_some_and(|ids| ids.contains(&worktree_id))
1865 })
1866 .and_then(|repo_id| self.repositories.get(repo_id))
1867 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1868 }
1869
1870 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1871 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1872 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1873 Some(status.status)
1874 }
1875
1876 pub fn repository_and_path_for_buffer_id(
1877 &self,
1878 buffer_id: BufferId,
1879 cx: &App,
1880 ) -> Option<(Entity<Repository>, RepoPath)> {
1881 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1882 let project_path = buffer.read(cx).project_path(cx)?;
1883 self.repository_and_path_for_project_path(&project_path, cx)
1884 }
1885
    /// Finds the repository containing `path`, along with the corresponding
    /// repository-relative path.
    ///
    /// When multiple repositories contain the path (nested checkouts), the one
    /// whose working directory compares greatest in path order is chosen —
    /// i.e. the innermost containing repository, since a nested directory
    /// path orders after its ancestors.
    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }
1900
1901 pub fn git_init(
1902 &self,
1903 path: Arc<Path>,
1904 fallback_branch_name: String,
1905 cx: &App,
1906 ) -> Task<Result<()>> {
1907 match &self.state {
1908 GitStoreState::Local { fs, .. } => {
1909 let fs = fs.clone();
1910 cx.background_executor()
1911 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1912 }
1913 GitStoreState::Remote {
1914 upstream_client,
1915 upstream_project_id: project_id,
1916 ..
1917 } => {
1918 let client = upstream_client.clone();
1919 let project_id = *project_id;
1920 cx.background_executor().spawn(async move {
1921 client
1922 .request(proto::GitInit {
1923 project_id: project_id,
1924 abs_path: path.to_string_lossy().into_owned(),
1925 fallback_branch_name,
1926 })
1927 .await?;
1928 Ok(())
1929 })
1930 }
1931 }
1932 }
1933
1934 pub fn git_clone(
1935 &self,
1936 repo: String,
1937 path: impl Into<Arc<std::path::Path>>,
1938 cx: &App,
1939 ) -> Task<Result<()>> {
1940 let path = path.into();
1941 match &self.state {
1942 GitStoreState::Local { fs, .. } => {
1943 let fs = fs.clone();
1944 cx.background_executor()
1945 .spawn(async move { fs.git_clone(&repo, &path).await })
1946 }
1947 GitStoreState::Remote {
1948 upstream_client,
1949 upstream_project_id,
1950 ..
1951 } => {
1952 if upstream_client.is_via_collab() {
1953 return Task::ready(Err(anyhow!(
1954 "Git Clone isn't supported for project guests"
1955 )));
1956 }
1957 let request = upstream_client.request(proto::GitClone {
1958 project_id: *upstream_project_id,
1959 abs_path: path.to_string_lossy().into_owned(),
1960 remote_repo: repo,
1961 });
1962
1963 cx.background_spawn(async move {
1964 let result = request.await?;
1965
1966 match result.success {
1967 true => Ok(()),
1968 false => Err(anyhow!("Git Clone failed")),
1969 }
1970 })
1971 }
1972 }
1973 }
1974
    /// Remote-procedure handler for `proto::UpdateRepository`: creates or
    /// updates the corresponding local `Repository` entity and re-forwards
    /// the update to any downstream client.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Create the repository entity on first sight of this id.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                // Stash the subscription here; `this._subscriptions` can't be
                // touched while `this.repositories` is mutably borrowed.
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Re-forward the update when this store is itself being shared.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
2030
2031 async fn handle_remove_repository(
2032 this: Entity<Self>,
2033 envelope: TypedEnvelope<proto::RemoveRepository>,
2034 mut cx: AsyncApp,
2035 ) -> Result<()> {
2036 this.update(&mut cx, |this, cx| {
2037 let mut update = envelope.payload;
2038 let id = RepositoryId::from_proto(update.id);
2039 this.repositories.remove(&id);
2040 if let Some((client, project_id)) = this.downstream_client() {
2041 update.project_id = project_id.to_proto();
2042 client.send(update).log_err();
2043 }
2044 if this.active_repo_id == Some(id) {
2045 this.active_repo_id = None;
2046 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
2047 }
2048 cx.emit(GitStoreEvent::RepositoryRemoved(id));
2049 });
2050 Ok(())
2051 }
2052
2053 async fn handle_git_init(
2054 this: Entity<Self>,
2055 envelope: TypedEnvelope<proto::GitInit>,
2056 cx: AsyncApp,
2057 ) -> Result<proto::Ack> {
2058 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2059 let name = envelope.payload.fallback_branch_name;
2060 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2061 .await?;
2062
2063 Ok(proto::Ack {})
2064 }
2065
2066 async fn handle_git_clone(
2067 this: Entity<Self>,
2068 envelope: TypedEnvelope<proto::GitClone>,
2069 cx: AsyncApp,
2070 ) -> Result<proto::GitCloneResponse> {
2071 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2072 let repo_name = envelope.payload.remote_repo;
2073 let result = cx
2074 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2075 .await;
2076
2077 Ok(proto::GitCloneResponse {
2078 success: result.is_ok(),
2079 })
2080 }
2081
2082 async fn handle_fetch(
2083 this: Entity<Self>,
2084 envelope: TypedEnvelope<proto::Fetch>,
2085 mut cx: AsyncApp,
2086 ) -> Result<proto::RemoteMessageResponse> {
2087 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2088 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2089 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2090 let askpass_id = envelope.payload.askpass_id;
2091
2092 let askpass = make_remote_delegate(
2093 this,
2094 envelope.payload.project_id,
2095 repository_id,
2096 askpass_id,
2097 &mut cx,
2098 );
2099
2100 let remote_output = repository_handle
2101 .update(&mut cx, |repository_handle, cx| {
2102 repository_handle.fetch(fetch_options, askpass, cx)
2103 })
2104 .await??;
2105
2106 Ok(proto::RemoteMessageResponse {
2107 stdout: remote_output.stdout,
2108 stderr: remote_output.stderr,
2109 })
2110 }
2111
    /// Pushes a branch to a remote on behalf of a remote peer, proxying
    /// credential prompts back through the peer's askpass delegate.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `options` is an optional enum field: `as_ref().map(|_| ...)`
        // preserves `None` when the field was unset, while the generated
        // `options()` accessor decodes the raw protobuf value.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2159
2160 async fn handle_pull(
2161 this: Entity<Self>,
2162 envelope: TypedEnvelope<proto::Pull>,
2163 mut cx: AsyncApp,
2164 ) -> Result<proto::RemoteMessageResponse> {
2165 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2166 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2167 let askpass_id = envelope.payload.askpass_id;
2168 let askpass = make_remote_delegate(
2169 this,
2170 envelope.payload.project_id,
2171 repository_id,
2172 askpass_id,
2173 &mut cx,
2174 );
2175
2176 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2177 let remote_name = envelope.payload.remote_name.into();
2178 let rebase = envelope.payload.rebase;
2179
2180 let remote_message = repository_handle
2181 .update(&mut cx, |repository_handle, cx| {
2182 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2183 })
2184 .await??;
2185
2186 Ok(proto::RemoteMessageResponse {
2187 stdout: remote_message.stdout,
2188 stderr: remote_message.stderr,
2189 })
2190 }
2191
2192 async fn handle_stage(
2193 this: Entity<Self>,
2194 envelope: TypedEnvelope<proto::Stage>,
2195 mut cx: AsyncApp,
2196 ) -> Result<proto::Ack> {
2197 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2198 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2199
2200 let entries = envelope
2201 .payload
2202 .paths
2203 .into_iter()
2204 .map(|path| RepoPath::new(&path))
2205 .collect::<Result<Vec<_>>>()?;
2206
2207 repository_handle
2208 .update(&mut cx, |repository_handle, cx| {
2209 repository_handle.stage_entries(entries, cx)
2210 })
2211 .await?;
2212 Ok(proto::Ack {})
2213 }
2214
2215 async fn handle_unstage(
2216 this: Entity<Self>,
2217 envelope: TypedEnvelope<proto::Unstage>,
2218 mut cx: AsyncApp,
2219 ) -> Result<proto::Ack> {
2220 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2221 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2222
2223 let entries = envelope
2224 .payload
2225 .paths
2226 .into_iter()
2227 .map(|path| RepoPath::new(&path))
2228 .collect::<Result<Vec<_>>>()?;
2229
2230 repository_handle
2231 .update(&mut cx, |repository_handle, cx| {
2232 repository_handle.unstage_entries(entries, cx)
2233 })
2234 .await?;
2235
2236 Ok(proto::Ack {})
2237 }
2238
2239 async fn handle_stash(
2240 this: Entity<Self>,
2241 envelope: TypedEnvelope<proto::Stash>,
2242 mut cx: AsyncApp,
2243 ) -> Result<proto::Ack> {
2244 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2245 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2246
2247 let entries = envelope
2248 .payload
2249 .paths
2250 .into_iter()
2251 .map(|path| RepoPath::new(&path))
2252 .collect::<Result<Vec<_>>>()?;
2253
2254 repository_handle
2255 .update(&mut cx, |repository_handle, cx| {
2256 repository_handle.stash_entries(entries, cx)
2257 })
2258 .await?;
2259
2260 Ok(proto::Ack {})
2261 }
2262
2263 async fn handle_stash_pop(
2264 this: Entity<Self>,
2265 envelope: TypedEnvelope<proto::StashPop>,
2266 mut cx: AsyncApp,
2267 ) -> Result<proto::Ack> {
2268 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2269 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2270 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2271
2272 repository_handle
2273 .update(&mut cx, |repository_handle, cx| {
2274 repository_handle.stash_pop(stash_index, cx)
2275 })
2276 .await?;
2277
2278 Ok(proto::Ack {})
2279 }
2280
2281 async fn handle_stash_apply(
2282 this: Entity<Self>,
2283 envelope: TypedEnvelope<proto::StashApply>,
2284 mut cx: AsyncApp,
2285 ) -> Result<proto::Ack> {
2286 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2287 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2288 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2289
2290 repository_handle
2291 .update(&mut cx, |repository_handle, cx| {
2292 repository_handle.stash_apply(stash_index, cx)
2293 })
2294 .await?;
2295
2296 Ok(proto::Ack {})
2297 }
2298
2299 async fn handle_stash_drop(
2300 this: Entity<Self>,
2301 envelope: TypedEnvelope<proto::StashDrop>,
2302 mut cx: AsyncApp,
2303 ) -> Result<proto::Ack> {
2304 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2305 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2306 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2307
2308 repository_handle
2309 .update(&mut cx, |repository_handle, cx| {
2310 repository_handle.stash_drop(stash_index, cx)
2311 })
2312 .await??;
2313
2314 Ok(proto::Ack {})
2315 }
2316
2317 async fn handle_set_index_text(
2318 this: Entity<Self>,
2319 envelope: TypedEnvelope<proto::SetIndexText>,
2320 mut cx: AsyncApp,
2321 ) -> Result<proto::Ack> {
2322 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2323 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2324 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2325
2326 repository_handle
2327 .update(&mut cx, |repository_handle, cx| {
2328 repository_handle.spawn_set_index_text_job(
2329 repo_path,
2330 envelope.payload.text,
2331 None,
2332 cx,
2333 )
2334 })
2335 .await??;
2336 Ok(proto::Ack {})
2337 }
2338
2339 async fn handle_run_hook(
2340 this: Entity<Self>,
2341 envelope: TypedEnvelope<proto::RunGitHook>,
2342 mut cx: AsyncApp,
2343 ) -> Result<proto::Ack> {
2344 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2345 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2346 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2347 repository_handle
2348 .update(&mut cx, |repository_handle, cx| {
2349 repository_handle.run_hook(hook, cx)
2350 })
2351 .await??;
2352 Ok(proto::Ack {})
2353 }
2354
    /// Creates a commit in the requested repository on behalf of a remote
    /// peer, proxying any credential prompts via the askpass delegate.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    // `zip`: the (name, email) pair is passed through only
                    // when BOTH fields were provided; otherwise `None`.
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                        allow_empty: options.allow_empty,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2394
2395 async fn handle_get_remotes(
2396 this: Entity<Self>,
2397 envelope: TypedEnvelope<proto::GetRemotes>,
2398 mut cx: AsyncApp,
2399 ) -> Result<proto::GetRemotesResponse> {
2400 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2401 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2402
2403 let branch_name = envelope.payload.branch_name;
2404 let is_push = envelope.payload.is_push;
2405
2406 let remotes = repository_handle
2407 .update(&mut cx, |repository_handle, _| {
2408 repository_handle.get_remotes(branch_name, is_push)
2409 })
2410 .await??;
2411
2412 Ok(proto::GetRemotesResponse {
2413 remotes: remotes
2414 .into_iter()
2415 .map(|remotes| proto::get_remotes_response::Remote {
2416 name: remotes.name.to_string(),
2417 })
2418 .collect::<Vec<_>>(),
2419 })
2420 }
2421
2422 async fn handle_get_worktrees(
2423 this: Entity<Self>,
2424 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2425 mut cx: AsyncApp,
2426 ) -> Result<proto::GitWorktreesResponse> {
2427 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2428 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2429
2430 let worktrees = repository_handle
2431 .update(&mut cx, |repository_handle, _| {
2432 repository_handle.worktrees()
2433 })
2434 .await??;
2435
2436 Ok(proto::GitWorktreesResponse {
2437 worktrees: worktrees
2438 .into_iter()
2439 .map(|worktree| worktree_to_proto(&worktree))
2440 .collect::<Vec<_>>(),
2441 })
2442 }
2443
    /// Creates a new git worktree for the requested repository.
    ///
    /// The wire format encodes three target shapes: an empty `name` means a
    /// detached worktree at `commit`; otherwise `use_existing_branch` picks
    /// between checking out an existing branch and creating a new branch
    /// based on `commit`.
    async fn handle_create_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitCreateWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let directory = PathBuf::from(envelope.payload.directory);
        let name = envelope.payload.name;
        let commit = envelope.payload.commit;
        let use_existing_branch = envelope.payload.use_existing_branch;
        let target = if name.is_empty() {
            // No branch name supplied: detach at the given commit.
            CreateWorktreeTarget::Detached { base_sha: commit }
        } else if use_existing_branch {
            CreateWorktreeTarget::ExistingBranch { branch_name: name }
        } else {
            CreateWorktreeTarget::NewBranch {
                branch_name: name,
                base_sha: commit,
            }
        };

        repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.create_worktree(target, directory)
            })
            .await??;

        Ok(proto::Ack {})
    }
2474
2475 async fn handle_remove_worktree(
2476 this: Entity<Self>,
2477 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2478 mut cx: AsyncApp,
2479 ) -> Result<proto::Ack> {
2480 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2481 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2482 let path = PathBuf::from(envelope.payload.path);
2483 let force = envelope.payload.force;
2484
2485 repository_handle
2486 .update(&mut cx, |repository_handle, _| {
2487 repository_handle.remove_worktree(path, force)
2488 })
2489 .await??;
2490
2491 Ok(proto::Ack {})
2492 }
2493
2494 async fn handle_rename_worktree(
2495 this: Entity<Self>,
2496 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2497 mut cx: AsyncApp,
2498 ) -> Result<proto::Ack> {
2499 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2500 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2501 let old_path = PathBuf::from(envelope.payload.old_path);
2502 let new_path = PathBuf::from(envelope.payload.new_path);
2503
2504 repository_handle
2505 .update(&mut cx, |repository_handle, _| {
2506 repository_handle.rename_worktree(old_path, new_path)
2507 })
2508 .await??;
2509
2510 Ok(proto::Ack {})
2511 }
2512
2513 async fn handle_get_head_sha(
2514 this: Entity<Self>,
2515 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2516 mut cx: AsyncApp,
2517 ) -> Result<proto::GitGetHeadShaResponse> {
2518 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2519 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2520
2521 let head_sha = repository_handle
2522 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2523 .await??;
2524
2525 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2526 }
2527
2528 async fn handle_edit_ref(
2529 this: Entity<Self>,
2530 envelope: TypedEnvelope<proto::GitEditRef>,
2531 mut cx: AsyncApp,
2532 ) -> Result<proto::Ack> {
2533 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2534 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2535 let ref_name = envelope.payload.ref_name;
2536 let commit = match envelope.payload.action {
2537 Some(proto::git_edit_ref::Action::UpdateToCommit(sha)) => Some(sha),
2538 Some(proto::git_edit_ref::Action::Delete(_)) => None,
2539 None => anyhow::bail!("GitEditRef missing action"),
2540 };
2541
2542 repository_handle
2543 .update(&mut cx, |repository_handle, _| {
2544 repository_handle.edit_ref(ref_name, commit)
2545 })
2546 .await??;
2547
2548 Ok(proto::Ack {})
2549 }
2550
2551 async fn handle_repair_worktrees(
2552 this: Entity<Self>,
2553 envelope: TypedEnvelope<proto::GitRepairWorktrees>,
2554 mut cx: AsyncApp,
2555 ) -> Result<proto::Ack> {
2556 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2557 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2558
2559 repository_handle
2560 .update(&mut cx, |repository_handle, _| {
2561 repository_handle.repair_worktrees()
2562 })
2563 .await??;
2564
2565 Ok(proto::Ack {})
2566 }
2567
2568 async fn handle_get_branches(
2569 this: Entity<Self>,
2570 envelope: TypedEnvelope<proto::GitGetBranches>,
2571 mut cx: AsyncApp,
2572 ) -> Result<proto::GitBranchesResponse> {
2573 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2574 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2575
2576 let branches = repository_handle
2577 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2578 .await??;
2579
2580 Ok(proto::GitBranchesResponse {
2581 branches: branches
2582 .into_iter()
2583 .map(|branch| branch_to_proto(&branch))
2584 .collect::<Vec<_>>(),
2585 })
2586 }
2587 async fn handle_get_default_branch(
2588 this: Entity<Self>,
2589 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2590 mut cx: AsyncApp,
2591 ) -> Result<proto::GetDefaultBranchResponse> {
2592 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2593 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2594
2595 let branch = repository_handle
2596 .update(&mut cx, |repository_handle, _| {
2597 repository_handle.default_branch(false)
2598 })
2599 .await??
2600 .map(Into::into);
2601
2602 Ok(proto::GetDefaultBranchResponse { branch })
2603 }
2604 async fn handle_create_branch(
2605 this: Entity<Self>,
2606 envelope: TypedEnvelope<proto::GitCreateBranch>,
2607 mut cx: AsyncApp,
2608 ) -> Result<proto::Ack> {
2609 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2610 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2611 let branch_name = envelope.payload.branch_name;
2612
2613 repository_handle
2614 .update(&mut cx, |repository_handle, _| {
2615 repository_handle.create_branch(branch_name, None)
2616 })
2617 .await??;
2618
2619 Ok(proto::Ack {})
2620 }
2621
2622 async fn handle_change_branch(
2623 this: Entity<Self>,
2624 envelope: TypedEnvelope<proto::GitChangeBranch>,
2625 mut cx: AsyncApp,
2626 ) -> Result<proto::Ack> {
2627 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2628 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2629 let branch_name = envelope.payload.branch_name;
2630
2631 repository_handle
2632 .update(&mut cx, |repository_handle, _| {
2633 repository_handle.change_branch(branch_name)
2634 })
2635 .await??;
2636
2637 Ok(proto::Ack {})
2638 }
2639
2640 async fn handle_rename_branch(
2641 this: Entity<Self>,
2642 envelope: TypedEnvelope<proto::GitRenameBranch>,
2643 mut cx: AsyncApp,
2644 ) -> Result<proto::Ack> {
2645 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2646 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2647 let branch = envelope.payload.branch;
2648 let new_name = envelope.payload.new_name;
2649
2650 repository_handle
2651 .update(&mut cx, |repository_handle, _| {
2652 repository_handle.rename_branch(branch, new_name)
2653 })
2654 .await??;
2655
2656 Ok(proto::Ack {})
2657 }
2658
2659 async fn handle_create_remote(
2660 this: Entity<Self>,
2661 envelope: TypedEnvelope<proto::GitCreateRemote>,
2662 mut cx: AsyncApp,
2663 ) -> Result<proto::Ack> {
2664 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2665 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2666 let remote_name = envelope.payload.remote_name;
2667 let remote_url = envelope.payload.remote_url;
2668
2669 repository_handle
2670 .update(&mut cx, |repository_handle, _| {
2671 repository_handle.create_remote(remote_name, remote_url)
2672 })
2673 .await??;
2674
2675 Ok(proto::Ack {})
2676 }
2677
2678 async fn handle_delete_branch(
2679 this: Entity<Self>,
2680 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2681 mut cx: AsyncApp,
2682 ) -> Result<proto::Ack> {
2683 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2684 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2685 let is_remote = envelope.payload.is_remote;
2686 let branch_name = envelope.payload.branch_name;
2687
2688 repository_handle
2689 .update(&mut cx, |repository_handle, _| {
2690 repository_handle.delete_branch(is_remote, branch_name)
2691 })
2692 .await??;
2693
2694 Ok(proto::Ack {})
2695 }
2696
2697 async fn handle_remove_remote(
2698 this: Entity<Self>,
2699 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2700 mut cx: AsyncApp,
2701 ) -> Result<proto::Ack> {
2702 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2703 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2704 let remote_name = envelope.payload.remote_name;
2705
2706 repository_handle
2707 .update(&mut cx, |repository_handle, _| {
2708 repository_handle.remove_remote(remote_name)
2709 })
2710 .await??;
2711
2712 Ok(proto::Ack {})
2713 }
2714
2715 async fn handle_show(
2716 this: Entity<Self>,
2717 envelope: TypedEnvelope<proto::GitShow>,
2718 mut cx: AsyncApp,
2719 ) -> Result<proto::GitCommitDetails> {
2720 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2721 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2722
2723 let commit = repository_handle
2724 .update(&mut cx, |repository_handle, _| {
2725 repository_handle.show(envelope.payload.commit)
2726 })
2727 .await??;
2728 Ok(proto::GitCommitDetails {
2729 sha: commit.sha.into(),
2730 message: commit.message.into(),
2731 commit_timestamp: commit.commit_timestamp,
2732 author_email: commit.author_email.into(),
2733 author_name: commit.author_name.into(),
2734 })
2735 }
2736
2737 async fn handle_create_checkpoint(
2738 this: Entity<Self>,
2739 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2740 mut cx: AsyncApp,
2741 ) -> Result<proto::GitCreateCheckpointResponse> {
2742 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2743 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2744
2745 let checkpoint = repository_handle
2746 .update(&mut cx, |repository, _| repository.checkpoint())
2747 .await??;
2748
2749 Ok(proto::GitCreateCheckpointResponse {
2750 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2751 })
2752 }
2753
2754 async fn handle_create_archive_checkpoint(
2755 this: Entity<Self>,
2756 envelope: TypedEnvelope<proto::GitCreateArchiveCheckpoint>,
2757 mut cx: AsyncApp,
2758 ) -> Result<proto::GitCreateArchiveCheckpointResponse> {
2759 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2760 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2761
2762 let (staged_commit_sha, unstaged_commit_sha) = repository_handle
2763 .update(&mut cx, |repository, _| {
2764 repository.create_archive_checkpoint()
2765 })
2766 .await??;
2767
2768 Ok(proto::GitCreateArchiveCheckpointResponse {
2769 staged_commit_sha,
2770 unstaged_commit_sha,
2771 })
2772 }
2773
2774 async fn handle_restore_checkpoint(
2775 this: Entity<Self>,
2776 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2777 mut cx: AsyncApp,
2778 ) -> Result<proto::Ack> {
2779 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2780 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2781
2782 let checkpoint = GitRepositoryCheckpoint {
2783 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2784 };
2785
2786 repository_handle
2787 .update(&mut cx, |repository, _| {
2788 repository.restore_checkpoint(checkpoint)
2789 })
2790 .await??;
2791
2792 Ok(proto::Ack {})
2793 }
2794
2795 async fn handle_restore_archive_checkpoint(
2796 this: Entity<Self>,
2797 envelope: TypedEnvelope<proto::GitRestoreArchiveCheckpoint>,
2798 mut cx: AsyncApp,
2799 ) -> Result<proto::Ack> {
2800 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2801 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2802 let staged_commit_sha = envelope.payload.staged_commit_sha;
2803 let unstaged_commit_sha = envelope.payload.unstaged_commit_sha;
2804
2805 repository_handle
2806 .update(&mut cx, |repository, _| {
2807 repository.restore_archive_checkpoint(staged_commit_sha, unstaged_commit_sha)
2808 })
2809 .await??;
2810
2811 Ok(proto::Ack {})
2812 }
2813
2814 async fn handle_compare_checkpoints(
2815 this: Entity<Self>,
2816 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2817 mut cx: AsyncApp,
2818 ) -> Result<proto::GitCompareCheckpointsResponse> {
2819 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2820 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2821
2822 let left = GitRepositoryCheckpoint {
2823 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2824 };
2825 let right = GitRepositoryCheckpoint {
2826 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2827 };
2828
2829 let equal = repository_handle
2830 .update(&mut cx, |repository, _| {
2831 repository.compare_checkpoints(left, right)
2832 })
2833 .await??;
2834
2835 Ok(proto::GitCompareCheckpointsResponse { equal })
2836 }
2837
2838 async fn handle_diff_checkpoints(
2839 this: Entity<Self>,
2840 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2841 mut cx: AsyncApp,
2842 ) -> Result<proto::GitDiffCheckpointsResponse> {
2843 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2844 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2845
2846 let base = GitRepositoryCheckpoint {
2847 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2848 };
2849 let target = GitRepositoryCheckpoint {
2850 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2851 };
2852
2853 let diff = repository_handle
2854 .update(&mut cx, |repository, _| {
2855 repository.diff_checkpoints(base, target)
2856 })
2857 .await??;
2858
2859 Ok(proto::GitDiffCheckpointsResponse { diff })
2860 }
2861
2862 async fn handle_load_commit_diff(
2863 this: Entity<Self>,
2864 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2865 mut cx: AsyncApp,
2866 ) -> Result<proto::LoadCommitDiffResponse> {
2867 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2868 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2869
2870 let commit_diff = repository_handle
2871 .update(&mut cx, |repository_handle, _| {
2872 repository_handle.load_commit_diff(envelope.payload.commit)
2873 })
2874 .await??;
2875 Ok(proto::LoadCommitDiffResponse {
2876 files: commit_diff
2877 .files
2878 .into_iter()
2879 .map(|file| proto::CommitFile {
2880 path: file.path.to_proto(),
2881 old_text: file.old_text,
2882 new_text: file.new_text,
2883 is_binary: file.is_binary,
2884 })
2885 .collect(),
2886 })
2887 }
2888
2889 async fn handle_file_history(
2890 this: Entity<Self>,
2891 envelope: TypedEnvelope<proto::GitFileHistory>,
2892 mut cx: AsyncApp,
2893 ) -> Result<proto::GitFileHistoryResponse> {
2894 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2895 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2896 let path = RepoPath::from_proto(&envelope.payload.path)?;
2897 let skip = envelope.payload.skip as usize;
2898 let limit = envelope.payload.limit.map(|l| l as usize);
2899
2900 let file_history = repository_handle
2901 .update(&mut cx, |repository_handle, _| {
2902 repository_handle.file_history_paginated(path, skip, limit)
2903 })
2904 .await??;
2905
2906 Ok(proto::GitFileHistoryResponse {
2907 entries: file_history
2908 .entries
2909 .into_iter()
2910 .map(|entry| proto::FileHistoryEntry {
2911 sha: entry.sha.to_string(),
2912 subject: entry.subject.to_string(),
2913 message: entry.message.to_string(),
2914 commit_timestamp: entry.commit_timestamp,
2915 author_name: entry.author_name.to_string(),
2916 author_email: entry.author_email.to_string(),
2917 })
2918 .collect(),
2919 path: file_history.path.to_proto(),
2920 })
2921 }
2922
    /// Resets the repository's HEAD to the given commit.
    ///
    /// Only soft and mixed resets are representable in the wire protocol
    /// (the match below is exhaustive over the proto enum).
    async fn handle_reset(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitReset>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let mode = match envelope.payload.mode() {
            git_reset::ResetMode::Soft => ResetMode::Soft,
            git_reset::ResetMode::Mixed => ResetMode::Mixed,
        };

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.reset(envelope.payload.commit, mode, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2943
2944 async fn handle_checkout_files(
2945 this: Entity<Self>,
2946 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2947 mut cx: AsyncApp,
2948 ) -> Result<proto::Ack> {
2949 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2950 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2951 let paths = envelope
2952 .payload
2953 .paths
2954 .iter()
2955 .map(|s| RepoPath::from_proto(s))
2956 .collect::<Result<Vec<_>>>()?;
2957
2958 repository_handle
2959 .update(&mut cx, |repository_handle, cx| {
2960 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2961 })
2962 .await?;
2963 Ok(proto::Ack {})
2964 }
2965
    /// Opens (or reuses) the commit-message buffer for a repository and
    /// replicates it to the requesting peer, returning its remote buffer id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        // NOTE(review): the results of `read_with` and the following
        // `this.update` are not propagated with `?` — confirm their return
        // types; if they are `Result`s, errors are silently dropped here.
        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        // Prefer the original sender when the request was
                        // forwarded through an intermediary.
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2996
    /// Answers an askpass credential prompt for an in-flight remote git
    /// operation on this host.
    ///
    /// The delegate is removed from the shared map while the prompt is
    /// outstanding and re-inserted afterwards, so the same delegate is never
    /// driven by two prompts at once.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            // A missing delegate indicates a protocol bug: panic in debug
            // builds, but fail the request gracefully in release builds.
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        // `None` means the user cancelled the prompt.
        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
3025
3026 async fn handle_check_for_pushed_commits(
3027 this: Entity<Self>,
3028 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
3029 mut cx: AsyncApp,
3030 ) -> Result<proto::CheckForPushedCommitsResponse> {
3031 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
3032 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
3033
3034 let branches = repository_handle
3035 .update(&mut cx, |repository_handle, _| {
3036 repository_handle.check_for_pushed_commits()
3037 })
3038 .await??;
3039 Ok(proto::CheckForPushedCommitsResponse {
3040 pushed_to: branches
3041 .into_iter()
3042 .map(|commit| commit.to_string())
3043 .collect(),
3044 })
3045 }
3046
3047 async fn handle_git_diff(
3048 this: Entity<Self>,
3049 envelope: TypedEnvelope<proto::GitDiff>,
3050 mut cx: AsyncApp,
3051 ) -> Result<proto::GitDiffResponse> {
3052 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
3053 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
3054 let diff_type = match envelope.payload.diff_type() {
3055 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
3056 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
3057 proto::git_diff::DiffType::MergeBase => {
3058 let base_ref = envelope
3059 .payload
3060 .merge_base_ref
3061 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
3062 DiffType::MergeBase {
3063 base_ref: base_ref.into(),
3064 }
3065 }
3066 };
3067
3068 let mut diff = repository_handle
3069 .update(&mut cx, |repository_handle, cx| {
3070 repository_handle.diff(diff_type, cx)
3071 })
3072 .await??;
3073 const ONE_MB: usize = 1_000_000;
3074 if diff.len() > ONE_MB {
3075 diff = diff.chars().take(ONE_MB).collect()
3076 }
3077
3078 Ok(proto::GitDiffResponse { diff })
3079 }
3080
3081 async fn handle_tree_diff(
3082 this: Entity<Self>,
3083 request: TypedEnvelope<proto::GetTreeDiff>,
3084 mut cx: AsyncApp,
3085 ) -> Result<proto::GetTreeDiffResponse> {
3086 let repository_id = RepositoryId(request.payload.repository_id);
3087 let diff_type = if request.payload.is_merge {
3088 DiffTreeType::MergeBase {
3089 base: request.payload.base.into(),
3090 head: request.payload.head.into(),
3091 }
3092 } else {
3093 DiffTreeType::Since {
3094 base: request.payload.base.into(),
3095 head: request.payload.head.into(),
3096 }
3097 };
3098
3099 let diff = this
3100 .update(&mut cx, |this, cx| {
3101 let repository = this.repositories().get(&repository_id)?;
3102 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
3103 })
3104 .context("missing repository")?
3105 .await??;
3106
3107 Ok(proto::GetTreeDiffResponse {
3108 entries: diff
3109 .entries
3110 .into_iter()
3111 .map(|(path, status)| proto::TreeDiffStatus {
3112 path: path.as_ref().to_proto(),
3113 status: match status {
3114 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
3115 TreeDiffStatus::Modified { .. } => {
3116 proto::tree_diff_status::Status::Modified.into()
3117 }
3118 TreeDiffStatus::Deleted { .. } => {
3119 proto::tree_diff_status::Status::Deleted.into()
3120 }
3121 },
3122 oid: match status {
3123 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
3124 Some(old.to_string())
3125 }
3126 TreeDiffStatus::Added => None,
3127 },
3128 })
3129 .collect(),
3130 })
3131 }
3132
3133 async fn handle_get_blob_content(
3134 this: Entity<Self>,
3135 request: TypedEnvelope<proto::GetBlobContent>,
3136 mut cx: AsyncApp,
3137 ) -> Result<proto::GetBlobContentResponse> {
3138 let oid = git::Oid::from_str(&request.payload.oid)?;
3139 let repository_id = RepositoryId(request.payload.repository_id);
3140 let content = this
3141 .update(&mut cx, |this, cx| {
3142 let repository = this.repositories().get(&repository_id)?;
3143 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
3144 })
3145 .context("missing repository")?
3146 .await?;
3147 Ok(proto::GetBlobContentResponse { content })
3148 }
3149
    /// Proto handler: opens (or retrieves) the unstaged diff for a buffer,
    /// records it in `shared_diffs` for the requesting peer, and returns the
    /// index ("staged") base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Store a handle to the diff keyed by the requesting peer so it can
        // be tracked per collaborator.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
3173
    /// Proto handler: opens (or retrieves) the uncommitted diff for a buffer,
    /// records it in `shared_diffs` for the requesting peer, and reports the
    /// base texts.
    ///
    /// The response `mode` tells the remote side how to reconstruct its base
    /// texts: `IndexMatchesHead` means the index base is the same snapshot as
    /// the HEAD base, so only the committed text is sent.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff is the unstaged diff; its base text is the
            // index's version of this file (when one exists).
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Index and HEAD share the same base buffer: send the
                        // committed text once and let the peer reuse it.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // A HEAD base exists but there is no index base text.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No HEAD base text; the index base (if any) is still sent.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3234
3235 async fn handle_update_diff_bases(
3236 this: Entity<Self>,
3237 request: TypedEnvelope<proto::UpdateDiffBases>,
3238 mut cx: AsyncApp,
3239 ) -> Result<()> {
3240 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3241 this.update(&mut cx, |this, cx| {
3242 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
3243 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
3244 {
3245 let buffer = buffer.read(cx).text_snapshot();
3246 diff_state.update(cx, |diff_state, cx| {
3247 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
3248 })
3249 }
3250 });
3251 Ok(())
3252 }
3253
3254 async fn handle_blame_buffer(
3255 this: Entity<Self>,
3256 envelope: TypedEnvelope<proto::BlameBuffer>,
3257 mut cx: AsyncApp,
3258 ) -> Result<proto::BlameBufferResponse> {
3259 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3260 let version = deserialize_version(&envelope.payload.version);
3261 let buffer = this.read_with(&cx, |this, cx| {
3262 this.buffer_store.read(cx).get_existing(buffer_id)
3263 })?;
3264 buffer
3265 .update(&mut cx, |buffer, _| {
3266 buffer.wait_for_version(version.clone())
3267 })
3268 .await?;
3269 let blame = this
3270 .update(&mut cx, |this, cx| {
3271 this.blame_buffer(&buffer, Some(version), cx)
3272 })
3273 .await?;
3274 Ok(serialize_blame_buffer_response(blame))
3275 }
3276
3277 async fn handle_get_permalink_to_line(
3278 this: Entity<Self>,
3279 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3280 mut cx: AsyncApp,
3281 ) -> Result<proto::GetPermalinkToLineResponse> {
3282 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3283 // let version = deserialize_version(&envelope.payload.version);
3284 let selection = {
3285 let proto_selection = envelope
3286 .payload
3287 .selection
3288 .context("no selection to get permalink for defined")?;
3289 proto_selection.start as u32..proto_selection.end as u32
3290 };
3291 let buffer = this.read_with(&cx, |this, cx| {
3292 this.buffer_store.read(cx).get_existing(buffer_id)
3293 })?;
3294 let permalink = this
3295 .update(&mut cx, |this, cx| {
3296 this.get_permalink_to_line(&buffer, selection, cx)
3297 })
3298 .await?;
3299 Ok(proto::GetPermalinkToLineResponse {
3300 permalink: permalink.to_string(),
3301 })
3302 }
3303
3304 fn repository_for_request(
3305 this: &Entity<Self>,
3306 id: RepositoryId,
3307 cx: &mut AsyncApp,
3308 ) -> Result<Entity<Repository>> {
3309 this.read_with(cx, |this, _| {
3310 this.repositories
3311 .get(&id)
3312 .context("missing repository handle")
3313 .cloned()
3314 })
3315 }
3316
3317 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3318 self.repositories
3319 .iter()
3320 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3321 .collect()
3322 }
3323
    /// Groups a worktree's updated entries by the repository containing them,
    /// mapping each path to a repository-relative `RepoPath`.
    ///
    /// A path contained in nested repositories is attributed to the innermost
    /// repository only. The heavy lifting runs on the background executor;
    /// the returned task resolves to the per-repository path lists.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize so entries can be compared against the repositories'
        // work directory paths below.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3403}
3404
3405impl BufferGitState {
    /// Creates empty per-buffer git state: no diffs, no base texts, and no
    /// pending operations or tasks.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Starts `false`: no diff recalculation is in flight yet.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }
3428
3429 #[ztracing::instrument(skip_all)]
3430 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3431 self.language = buffer.read(cx).language().cloned();
3432 self.language_changed = true;
3433 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3434 }
3435
    /// Schedules a background re-parse of merge conflict markers in `buffer`.
    ///
    /// The returned receiver fires once the conflict set has been updated.
    /// When there is no live conflict set, or no conflict was previously
    /// recorded, no work is scheduled and the receiver is returned with its
    /// sender dropped (so awaiting it yields `Canceled`).
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when a conflict was previously detected; otherwise
        // there is no old snapshot to compare a new parse against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse on the background executor; comparing old and new
                // snapshots yields the range whose conflicts changed.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Resolve every receiver waiting on a reparse.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3487
3488 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3489 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3490 }
3491
3492 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3493 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3494 }
3495
3496 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3497 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3498 }
3499
3500 fn handle_base_texts_updated(
3501 &mut self,
3502 buffer: text::BufferSnapshot,
3503 message: proto::UpdateDiffBases,
3504 cx: &mut Context<Self>,
3505 ) {
3506 use proto::update_diff_bases::Mode;
3507
3508 let Some(mode) = Mode::from_i32(message.mode) else {
3509 return;
3510 };
3511
3512 let diff_bases_change = match mode {
3513 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3514 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3515 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3516 Mode::IndexAndHead => DiffBasesChange::SetEach {
3517 index: message.staged_text,
3518 head: message.committed_text,
3519 },
3520 };
3521
3522 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3523 }
3524
3525 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3526 if *self.recalculating_tx.borrow() {
3527 let mut rx = self.recalculating_tx.subscribe();
3528 Some(async move {
3529 loop {
3530 let is_recalculating = rx.recv().await;
3531 if is_recalculating != Some(true) {
3532 break;
3533 }
3534 }
3535 })
3536 } else {
3537 None
3538 }
3539 }
3540
3541 fn diff_bases_changed(
3542 &mut self,
3543 buffer: text::BufferSnapshot,
3544 diff_bases_change: Option<DiffBasesChange>,
3545 cx: &mut Context<Self>,
3546 ) {
3547 match diff_bases_change {
3548 Some(DiffBasesChange::SetIndex(index)) => {
3549 self.index_text = index.map(|mut index| {
3550 text::LineEnding::normalize(&mut index);
3551 Arc::from(index.as_str())
3552 });
3553 self.index_changed = true;
3554 }
3555 Some(DiffBasesChange::SetHead(head)) => {
3556 self.head_text = head.map(|mut head| {
3557 text::LineEnding::normalize(&mut head);
3558 Arc::from(head.as_str())
3559 });
3560 self.head_changed = true;
3561 }
3562 Some(DiffBasesChange::SetBoth(text)) => {
3563 let text = text.map(|mut text| {
3564 text::LineEnding::normalize(&mut text);
3565 Arc::from(text.as_str())
3566 });
3567 self.head_text = text.clone();
3568 self.index_text = text;
3569 self.head_changed = true;
3570 self.index_changed = true;
3571 }
3572 Some(DiffBasesChange::SetEach { index, head }) => {
3573 self.index_text = index.map(|mut index| {
3574 text::LineEnding::normalize(&mut index);
3575 Arc::from(index.as_str())
3576 });
3577 self.index_changed = true;
3578 self.head_text = head.map(|mut head| {
3579 text::LineEnding::normalize(&mut head);
3580 Arc::from(head.as_str())
3581 });
3582 self.head_changed = true;
3583 }
3584 None => {}
3585 }
3586
3587 self.recalculate_diffs(buffer, cx)
3588 }
3589
    /// Recomputes the unstaged, uncommitted, and per-oid diffs for `buffer`
    /// against the currently stored base texts, on a spawned task.
    ///
    /// `recalculating_tx` is flipped to `true` for the duration and back to
    /// `false` when the task completes or is cancelled due to newer hunk
    /// staging operations (see `wait_for_recalculation`).
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality suffices here: `diff_bases_changed` stores the
        // same Arc in both fields when the index is known to match the head.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        // Snapshot the live oid diffs (and their cached base texts) so the
        // spawned task can update them without touching `self`.
        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop dead oid diff handles and their cached base texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When index == head, the uncommitted diff is identical to
                // the unstaged diff, so reuse it instead of recomputing.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot(new_oid_diff, &buffer, cx)
                    })
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and signal waiters that recalculation
            // has finished.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
3788}
3789
/// Builds an [`AskPassDelegate`] that forwards credential prompts to the
/// downstream client via an `AskPassRequest` and feeds the reply back
/// through `tx`.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // Without a downstream client there is nobody to ask; returning
            // drops `tx` unanswered.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Best-effort scrubbing of the plaintext reply from memory.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3819
3820impl RepositoryId {
3821 pub fn to_proto(self) -> u64 {
3822 self.0
3823 }
3824
3825 pub fn from_proto(id: u64) -> Self {
3826 RepositoryId(id)
3827 }
3828}
3829
3830impl RepositorySnapshot {
3831 fn empty(
3832 id: RepositoryId,
3833 work_directory_abs_path: Arc<Path>,
3834 original_repo_abs_path: Option<Arc<Path>>,
3835 path_style: PathStyle,
3836 ) -> Self {
3837 Self {
3838 id,
3839 statuses_by_path: Default::default(),
3840 original_repo_abs_path: original_repo_abs_path
3841 .unwrap_or_else(|| work_directory_abs_path.clone()),
3842 work_directory_abs_path,
3843 branch: None,
3844 branch_list: Arc::from([]),
3845 head_commit: None,
3846 scan_id: 0,
3847 merge: Default::default(),
3848 remote_origin_url: None,
3849 remote_upstream_url: None,
3850 stash_entries: Default::default(),
3851 linked_worktrees: Arc::from([]),
3852 path_style,
3853 }
3854 }
3855
    /// Serializes the full snapshot into an `UpdateRepository` message for a
    /// peer with no prior state — every status is an update, nothing is
    /// removed, and `entry_ids` carries the repository id.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3897
    /// Serializes the delta between `old` and `self` into an
    /// `UpdateRepository` message for peers that already hold `old`.
    ///
    /// Statuses are diffed with a single merge-style pass over the two
    /// path-sorted status lists, producing updates for added/changed paths
    /// and removals for paths present only in `old`.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Path only present in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Present in both: emit only when it changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Path only present in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3978
    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    pub fn is_main_worktree(&self) -> bool {
        // For the main worktree the original checkout path and the work
        // directory coincide (see `RepositorySnapshot::empty`).
        self.work_directory_abs_path == self.original_repo_abs_path
    }
3989
    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        // Defined as the exact complement of `is_main_worktree`.
        !self.is_main_worktree()
    }
3997
    /// The linked worktrees known to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }
4001
    /// Iterates over the status entries of all paths, in path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }
4005
    /// Returns the aggregated git status summary over all status entries.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }
4009
4010 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
4011 self.statuses_by_path
4012 .get(&PathKey(path.as_ref().clone()), ())
4013 .cloned()
4014 }
4015
4016 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4017 self.statuses_by_path
4018 .get(&PathKey(path.as_ref().clone()), ())
4019 .and_then(|entry| entry.diff_stat)
4020 }
4021
    /// Converts an absolute path into a path relative to this repository's
    /// work directory; returns `None` if the path lies outside of it.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }
4025
    /// Converts a repository-relative path back into an absolute path by
    /// joining it onto the work directory.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        let repo_path = repo_path.display(self.path_style);
        PathBuf::from(
            self.path_style
                // Joining the absolute work directory with a relative repo
                // path is expected to always succeed — TODO confirm `join`
                // cannot fail for this input shape.
                .join(&self.work_directory_abs_path, repo_path.as_ref())
                .unwrap(),
        )
    }
4034
4035 #[inline]
4036 fn abs_path_to_repo_path_inner(
4037 work_directory_abs_path: &Path,
4038 abs_path: &Path,
4039 path_style: PathStyle,
4040 ) -> Option<RepoPath> {
4041 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
4042 Some(RepoPath::from_rel_path(&rel_path))
4043 }
4044
    /// Whether this path was recorded as conflicted when the merge heads last
    /// changed (the record can outlive the conflict itself).
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }
4050
4051 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
4052 let had_conflict_on_last_merge_head_change = self
4053 .merge
4054 .merge_heads_by_conflicted_path
4055 .contains_key(repo_path);
4056 let has_conflict_currently = self
4057 .status_for_path(repo_path)
4058 .is_some_and(|entry| entry.status.is_conflicted());
4059 had_conflict_on_last_merge_head_change || has_conflict_currently
4060 }
4061
4062 /// This is the name that will be displayed in the repository selector for this repository.
4063 pub fn display_name(&self) -> SharedString {
4064 self.work_directory_abs_path
4065 .file_name()
4066 .unwrap_or_default()
4067 .to_string_lossy()
4068 .to_string()
4069 .into()
4070 }
4071}
4072
/// Converts a [`StashEntry`] into its protobuf representation.
pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
    proto::StashEntry {
        oid: entry.oid.as_bytes().to_vec(),
        message: entry.message.clone(),
        branch: entry.branch.clone(),
        index: entry.index as u64,
        timestamp: entry.timestamp,
    }
}
4082
/// Converts a protobuf stash entry back into a [`StashEntry`].
///
/// Fails when the oid bytes do not form a valid object id.
pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
    Ok(StashEntry {
        oid: Oid::from_bytes(&entry.oid)?,
        message: entry.message.clone(),
        index: entry.index as usize,
        branch: entry.branch.clone(),
        timestamp: entry.timestamp,
    })
}
4092
4093impl MergeDetails {
4094 async fn update(
4095 &mut self,
4096 backend: &Arc<dyn GitRepository>,
4097 current_conflicted_paths: Vec<RepoPath>,
4098 ) -> Result<bool> {
4099 log::debug!("load merge details");
4100 self.message = backend.merge_message().await.map(SharedString::from);
4101 let heads = backend
4102 .revparse_batch(vec![
4103 "MERGE_HEAD".into(),
4104 "CHERRY_PICK_HEAD".into(),
4105 "REBASE_HEAD".into(),
4106 "REVERT_HEAD".into(),
4107 "APPLY_HEAD".into(),
4108 ])
4109 .await
4110 .log_err()
4111 .unwrap_or_default()
4112 .into_iter()
4113 .map(|opt| opt.map(SharedString::from))
4114 .collect::<Vec<_>>();
4115
4116 let mut conflicts_changed = false;
4117
4118 // Record the merge state for newly conflicted paths
4119 for path in ¤t_conflicted_paths {
4120 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
4121 conflicts_changed = true;
4122 self.merge_heads_by_conflicted_path
4123 .insert(path.clone(), heads.clone());
4124 }
4125 }
4126
4127 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
4128 self.merge_heads_by_conflicted_path
4129 .retain(|path, old_merge_heads| {
4130 let keep = current_conflicted_paths.contains(path)
4131 || (old_merge_heads == &heads
4132 && old_merge_heads.iter().any(|head| head.is_some()));
4133 if !keep {
4134 conflicts_changed = true;
4135 }
4136 keep
4137 });
4138
4139 Ok(conflicts_changed)
4140 }
4141}
4142
4143impl Repository {
4144 pub fn is_trusted(&self) -> bool {
4145 match self.repository_state.peek() {
4146 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
4147 _ => false,
4148 }
4149 }
4150
4151 pub fn snapshot(&self) -> RepositorySnapshot {
4152 self.snapshot.clone()
4153 }
4154
4155 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
4156 self.pending_ops.iter().cloned()
4157 }
4158
4159 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
4160 self.pending_ops.summary().clone()
4161 }
4162
4163 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
4164 self.pending_ops
4165 .get(&PathKey(path.as_ref().clone()), ())
4166 .cloned()
4167 }
4168
    /// Constructs a repository backed by a local git working directory.
    ///
    /// The backend state is initialized asynchronously; the shared future is
    /// handed to the local git worker and also stored on the returned
    /// `Repository` so queued jobs can await it. Also subscribes to the
    /// entity's own events so cached commit-graph data is invalidated when
    /// the head, branch list, or stash changes.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Errors are mapped to `String` so the `.shared()` future's
                // output stays cloneable.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached graph data on events that can change the commit
        // graph; `scan_id > 1` skips invalidation during the initial scan.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                if this.scan_id > 1 {
                    // Only entries keyed on `LogSource::All` are dropped here;
                    // graphs for other sources are kept.
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4242
4243 fn remote(
4244 id: RepositoryId,
4245 work_directory_abs_path: Arc<Path>,
4246 original_repo_abs_path: Option<Arc<Path>>,
4247 path_style: PathStyle,
4248 project_id: ProjectId,
4249 client: AnyProtoClient,
4250 git_store: WeakEntity<GitStore>,
4251 cx: &mut Context<Self>,
4252 ) -> Self {
4253 let snapshot = RepositorySnapshot::empty(
4254 id,
4255 work_directory_abs_path,
4256 original_repo_abs_path,
4257 path_style,
4258 );
4259 let repository_state = RemoteRepositoryState { project_id, client };
4260 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
4261 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
4262 Self {
4263 this: cx.weak_entity(),
4264 snapshot,
4265 commit_message_buffer: None,
4266 git_store,
4267 pending_ops: Default::default(),
4268 paths_needing_status_update: Default::default(),
4269 job_sender,
4270 repository_state,
4271 askpass_delegates: Default::default(),
4272 latest_askpass_id: 0,
4273 active_jobs: Default::default(),
4274 job_id: 0,
4275 initial_graph_data: Default::default(),
4276 commit_data: Default::default(),
4277 graph_commit_data_handler: GraphCommitHandlerState::Closed,
4278 }
4279 }
4280
4281 pub fn git_store(&self) -> Option<Entity<GitStore>> {
4282 self.git_store.upgrade()
4283 }
4284
    /// Recomputes the index and HEAD diff bases for every open buffer that
    /// belongs to this repository, then applies the changes to each buffer's
    /// diff state (mirroring them to any downstream collab client).
    ///
    /// Keyed on [`GitJobKey::ReloadBufferDiffBases`] so rapid successive
    /// requests coalesce in the worker. Local repositories only: the job
    /// logs an error and returns early for remote repositories.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Gather, for each open buffer in this repository, the diff
                // base texts currently in use (only where a diff is upgradable).
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Off the main thread: load fresh index/HEAD texts and compute
                // which bases actually changed.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // When index and HEAD agree, a single
                                            // `SetBoth` keeps them sharing one text.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Apply the computed changes, forwarding each to the
                // downstream client first when one is connected.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4444
4445 pub fn send_job<F, Fut, R>(
4446 &mut self,
4447 status: Option<SharedString>,
4448 job: F,
4449 ) -> oneshot::Receiver<R>
4450 where
4451 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
4452 Fut: Future<Output = R> + 'static,
4453 R: Send + 'static,
4454 {
4455 self.send_keyed_job(None, status, job)
4456 }
4457
    /// Enqueues a git job on this repository's worker queue.
    ///
    /// When `key` is `Some`, the worker can coalesce this job with other jobs
    /// carrying the same key. While the job runs, `status` (if provided) is
    /// recorded in `active_jobs` so the UI can show a progress message. The
    /// result is delivered through the returned receiver; dropping the
    /// receiver does not cancel the job (the send failure is ignored).
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job so its status message (and start
                        // time) are visible while it runs.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Clear the status entry whether or not the job
                        // succeeded; failures to update (dropped entity) are
                        // deliberately ignored.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4507
4508 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4509 let Some(git_store) = self.git_store.upgrade() else {
4510 return;
4511 };
4512 let entity = cx.entity();
4513 git_store.update(cx, |git_store, cx| {
4514 let Some((&id, _)) = git_store
4515 .repositories
4516 .iter()
4517 .find(|(_, handle)| *handle == &entity)
4518 else {
4519 return;
4520 };
4521 git_store.active_repo_id = Some(id);
4522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4523 });
4524 }
4525
4526 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
4527 self.snapshot.status()
4528 }
4529
4530 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4531 self.snapshot.diff_stat_for_path(path)
4532 }
4533
4534 pub fn cached_stash(&self) -> GitStash {
4535 self.snapshot.stash_entries.clone()
4536 }
4537
4538 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4539 let git_store = self.git_store.upgrade()?;
4540 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4541 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4542 let abs_path = SanitizedPath::new(&abs_path);
4543 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4544 Some(ProjectPath {
4545 worktree_id: worktree.read(cx).id(),
4546 path: relative_path,
4547 })
4548 }
4549
4550 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4551 let git_store = self.git_store.upgrade()?;
4552 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4553 let abs_path = worktree_store.absolutize(path, cx)?;
4554 self.snapshot.abs_path_to_repo_path(&abs_path)
4555 }
4556
4557 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4558 other
4559 .read(cx)
4560 .snapshot
4561 .work_directory_abs_path
4562 .starts_with(&self.snapshot.work_directory_abs_path)
4563 }
4564
    /// Returns the shared commit-message buffer for this repository, creating
    /// it on first use.
    ///
    /// Local repositories create the buffer in-process; remote ones request a
    /// buffer id from the host and wait for it to replicate. The result is
    /// cached in `commit_message_buffer`, and the buffer is tagged with the
    /// "Git Commit" language when a registry is available.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the buffer created by an earlier call.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // NOTE(review): the results of these `update` calls appear
                    // to be discarded rather than propagated with `?` — confirm
                    // this is intentional against gpui's `update` signature.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4617
    /// Creates a fresh local commit-message buffer, tags it with the
    /// "Git Commit" language when a registry is provided, and caches it on
    /// the repository.
    fn open_local_commit_buffer(
        language_registry: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        cx.spawn(async move |repository, cx| {
            let git_commit_language = match language_registry {
                Some(language_registry) => {
                    Some(language_registry.language_for_name("Git Commit").await?)
                }
                None => None,
            };
            let buffer = buffer_store
                .update(cx, |buffer_store, cx| {
                    buffer_store.create_buffer(git_commit_language, false, cx)
                })
                .await?;

            // Cache so subsequent `open_commit_buffer` calls reuse this buffer.
            repository.update(cx, |repository, _| {
                repository.commit_message_buffer = Some(buffer.clone());
            })?;
            Ok(buffer)
        })
    }
4642
    /// Restores `paths` to their contents at `commit`
    /// (`git checkout <commit> -- <paths>`), tracking each path as a pending
    /// "reverted" operation while the job runs. Remote repositories forward
    /// the request over RPC.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4697
    /// Resets HEAD to `commit` with the given mode (soft keeps the index,
    /// mixed resets it; hard resets are not represented here). Remote
    /// repositories forward the request over RPC.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4731
    /// Loads the details (sha, message, author, timestamp) of a single
    /// commit, forwarding the request over RPC for remote repositories.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4759
    /// Loads the full diff of a commit (per-file old/new texts), forwarding
    /// the request over RPC for remote repositories.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4795
4796 pub fn file_history(
4797 &mut self,
4798 path: RepoPath,
4799 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4800 self.file_history_paginated(path, 0, None)
4801 }
4802
    /// Loads a page of the commit history touching `path`, skipping the first
    /// `skip` entries and returning at most `limit` entries (all remaining
    /// entries when `None`). Remote repositories forward the request over RPC.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4844
4845 pub fn get_graph_data(
4846 &self,
4847 log_source: LogSource,
4848 log_order: LogOrder,
4849 ) -> Option<&InitialGitGraphData> {
4850 self.initial_graph_data.get(&(log_source, log_order))
4851 }
4852
4853 pub fn search_commits(
4854 &mut self,
4855 log_source: LogSource,
4856 search_args: SearchCommitArgs,
4857 request_tx: smol::channel::Sender<Oid>,
4858 cx: &mut Context<Self>,
4859 ) {
4860 let repository_state = self.repository_state.clone();
4861
4862 cx.background_spawn(async move {
4863 let repo_state = repository_state.await;
4864
4865 match repo_state {
4866 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4867 backend
4868 .search_commits(log_source, search_args, request_tx)
4869 .await
4870 .log_err();
4871 }
4872 Ok(RepositoryState::Remote(_)) => {}
4873 Err(_) => {}
4874 };
4875 })
4876 .detach();
4877 }
4878
    /// Returns the slice of cached graph commits covering `range`, starting a
    /// background fetch for this `(log_source, log_order)` pair on first use.
    ///
    /// The requested range is clamped to the rows loaded so far, so callers
    /// may ask for rows that have not streamed in yet; `is_loading` reports
    /// whether the fetch task is still running.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record a fetch failure on the cached entry so the UI
                    // can surface it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp both ends of the range to the data loaded so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4948
    /// Streams initial commit-graph data for a local repository into the
    /// cached `initial_graph_data` entry, emitting a `CountUpdated` event for
    /// each batch received from the backend.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // The backend walks the log on a background thread, sending batches
        // through `request_tx`; its terminal result is awaited at the end.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        // Append the batch, keeping the oid -> row-index map
                        // in sync with `commit_data`.
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Propagate any error from the backend's log walk.
        task.await?;
        Ok(())
    }
5002
    /// Returns the cached state of `sha`'s commit data, kicking off a load
    /// when it is not cached yet.
    ///
    /// When the background handler is open, the sha is enqueued and marked as
    /// loading; when it is closed, the handler is (re)started — the request
    /// itself is not retried here, so callers are presumably expected to call
    /// again (e.g. after a notify) — TODO confirm.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        // Uncached shas report as loading until the handler delivers them.
        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
5023
    /// Spawns the background machinery that resolves commit-data requests.
    ///
    /// Two tasks are started: a background task that owns the backend's
    /// commit-data reader, services shas arriving on `commit_data_request`,
    /// and shuts down after 10 seconds of inactivity; and a foreground task
    /// that stores each result into `commit_data` and notifies, marking the
    /// handler closed once the background side hangs up.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Bounded result channel applies backpressure to the reader side.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Result channel closed (or entity dropped): mark the handler
            // closed so a later `fetch_commit_data` call can restart it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: tear the handler down when no request arrives
                // within 10 seconds. (A fresh timer is created per iteration.)
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                // Read failures are logged and skipped; the sha
                                // stays in the Loading state.
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Dropping the sender wakes the foreground task so it can mark
            // the handler closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
5114
5115 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
5116 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
5117 }
5118
    /// Kicks off saves for every open buffer among `entries` that has unsaved
    /// edits and whose file still exists on disk, returning the save tasks so
    /// callers can await them before touching the index.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    // NOTE(review): buffers whose file no longer exists on disk
                    // are skipped — presumably to avoid recreating deleted
                    // files on save; confirm.
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
5145
5146 pub fn stage_entries(
5147 &mut self,
5148 entries: Vec<RepoPath>,
5149 cx: &mut Context<Self>,
5150 ) -> Task<anyhow::Result<()>> {
5151 self.stage_or_unstage_entries(true, entries, cx)
5152 }
5153
5154 pub fn unstage_entries(
5155 &mut self,
5156 entries: Vec<RepoPath>,
5157 cx: &mut Context<Self>,
5158 ) -> Task<anyhow::Result<()>> {
5159 self.stage_or_unstage_entries(false, entries, cx)
5160 }
5161
    /// Stages (`stage == true`) or unstages the given `entries` as one
    /// tracked pending operation.
    ///
    /// Order of operations:
    /// 1. Save any open buffers for the entries so the index write sees
    ///    current on-disk contents.
    /// 2. For every entry with an open buffer and a live uncommitted diff,
    ///    optimistically mark all hunks (un)staged and bump that diff state's
    ///    `hunk_staging_operation_count`.
    /// 3. Run the actual `git add` / `git reset` — via the local backend, or
    ///    a `Stage`/`Unstage` RPC for remote projects. The job is keyed with
    ///    `GitJobKey::WriteIndex(entries)` so index writes to the same paths
    ///    are serialized.
    /// 4. On success, record the operation counts as written; on failure,
    ///    clear the optimistic pending-hunk state.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line shown while the job runs, e.g.
        // "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Flush dirty buffers first; a failed save aborts the whole job.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip hunk state in the UI before
                            // touching the index; remember each diff state's
                            // resulting operation count so we can reconcile
                            // (or roll back) after the git command finishes.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        // Entries without an open buffer / diff
                                        // state need no optimistic UI update.
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the real index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic UI state with the
                            // outcome: commit the counts on success, or clear
                            // pending hunks on failure. Errors updating a
                            // dropped diff state are ignored.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5346
5347 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5348 let snapshot = self.snapshot.clone();
5349 let pending_ops = self.pending_ops.clone();
5350 let to_stage = cx.background_spawn(async move {
5351 snapshot
5352 .status()
5353 .filter_map(|entry| {
5354 if let Some(ops) =
5355 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5356 {
5357 if ops.staging() || ops.staged() {
5358 None
5359 } else {
5360 Some(entry.repo_path)
5361 }
5362 } else if entry.status.staging().is_fully_staged() {
5363 None
5364 } else {
5365 Some(entry.repo_path)
5366 }
5367 })
5368 .collect()
5369 });
5370
5371 cx.spawn(async move |this, cx| {
5372 let to_stage = to_stage.await;
5373 this.update(cx, |this, cx| {
5374 this.stage_or_unstage_entries(true, to_stage, cx)
5375 })?
5376 .await
5377 })
5378 }
5379
5380 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5381 let snapshot = self.snapshot.clone();
5382 let pending_ops = self.pending_ops.clone();
5383 let to_unstage = cx.background_spawn(async move {
5384 snapshot
5385 .status()
5386 .filter_map(|entry| {
5387 if let Some(ops) =
5388 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5389 {
5390 if !ops.staging() && !ops.staged() {
5391 None
5392 } else {
5393 Some(entry.repo_path)
5394 }
5395 } else if entry.status.staging().is_fully_unstaged() {
5396 None
5397 } else {
5398 Some(entry.repo_path)
5399 }
5400 })
5401 .collect()
5402 });
5403
5404 cx.spawn(async move |this, cx| {
5405 let to_unstage = to_unstage.await;
5406 this.update(cx, |this, cx| {
5407 this.stage_or_unstage_entries(false, to_unstage, cx)
5408 })?
5409 .await
5410 })
5411 }
5412
5413 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5414 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
5415
5416 self.stash_entries(to_stash, cx)
5417 }
5418
    /// Stashes the given paths (`git stash push -- <paths>`).
    ///
    /// Locally this delegates to the backend; for remote projects it sends a
    /// `proto::Stash` RPC carrying the serialized paths.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` propagates a dropped-receiver error, second the job result.
            .await??;
            Ok(())
        })
    }
5455
5456 pub fn stash_pop(
5457 &mut self,
5458 index: Option<usize>,
5459 cx: &mut Context<Self>,
5460 ) -> Task<anyhow::Result<()>> {
5461 let id = self.id;
5462 cx.spawn(async move |this, cx| {
5463 this.update(cx, |this, _| {
5464 this.send_job(None, move |git_repo, _cx| async move {
5465 match git_repo {
5466 RepositoryState::Local(LocalRepositoryState {
5467 backend,
5468 environment,
5469 ..
5470 }) => backend.stash_pop(index, environment).await,
5471 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5472 client
5473 .request(proto::StashPop {
5474 project_id: project_id.0,
5475 repository_id: id.to_proto(),
5476 stash_index: index.map(|i| i as u64),
5477 })
5478 .await
5479 .context("sending stash pop request")?;
5480 Ok(())
5481 }
5482 }
5483 })
5484 })?
5485 .await??;
5486 Ok(())
5487 })
5488 }
5489
5490 pub fn stash_apply(
5491 &mut self,
5492 index: Option<usize>,
5493 cx: &mut Context<Self>,
5494 ) -> Task<anyhow::Result<()>> {
5495 let id = self.id;
5496 cx.spawn(async move |this, cx| {
5497 this.update(cx, |this, _| {
5498 this.send_job(None, move |git_repo, _cx| async move {
5499 match git_repo {
5500 RepositoryState::Local(LocalRepositoryState {
5501 backend,
5502 environment,
5503 ..
5504 }) => backend.stash_apply(index, environment).await,
5505 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5506 client
5507 .request(proto::StashApply {
5508 project_id: project_id.0,
5509 repository_id: id.to_proto(),
5510 stash_index: index.map(|i| i as u64),
5511 })
5512 .await
5513 .context("sending stash apply request")?;
5514 Ok(())
5515 }
5516 }
5517 })
5518 })?
5519 .await??;
5520 Ok(())
5521 })
5522 }
5523
5524 pub fn stash_drop(
5525 &mut self,
5526 index: Option<usize>,
5527 cx: &mut Context<Self>,
5528 ) -> oneshot::Receiver<anyhow::Result<()>> {
5529 let id = self.id;
5530 let updates_tx = self
5531 .git_store()
5532 .and_then(|git_store| match &git_store.read(cx).state {
5533 GitStoreState::Local { downstream, .. } => downstream
5534 .as_ref()
5535 .map(|downstream| downstream.updates_tx.clone()),
5536 _ => None,
5537 });
5538 let this = cx.weak_entity();
5539 self.send_job(None, move |git_repo, mut cx| async move {
5540 match git_repo {
5541 RepositoryState::Local(LocalRepositoryState {
5542 backend,
5543 environment,
5544 ..
5545 }) => {
5546 // TODO would be nice to not have to do this manually
5547 let result = backend.stash_drop(index, environment).await;
5548 if result.is_ok()
5549 && let Ok(stash_entries) = backend.stash_entries().await
5550 {
5551 let snapshot = this.update(&mut cx, |this, cx| {
5552 this.snapshot.stash_entries = stash_entries;
5553 cx.emit(RepositoryEvent::StashEntriesChanged);
5554 this.snapshot.clone()
5555 })?;
5556 if let Some(updates_tx) = updates_tx {
5557 updates_tx
5558 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5559 .ok();
5560 }
5561 }
5562
5563 result
5564 }
5565 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5566 client
5567 .request(proto::StashDrop {
5568 project_id: project_id.0,
5569 repository_id: id.to_proto(),
5570 stash_index: index.map(|i| i as u64),
5571 })
5572 .await
5573 .context("sending stash pop request")?;
5574 Ok(())
5575 }
5576 }
5577 })
5578 }
5579
5580 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5581 let id = self.id;
5582 self.send_job(
5583 Some(format!("git hook {}", hook.as_str()).into()),
5584 move |git_repo, _cx| async move {
5585 match git_repo {
5586 RepositoryState::Local(LocalRepositoryState {
5587 backend,
5588 environment,
5589 ..
5590 }) => backend.run_hook(hook, environment.clone()).await,
5591 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5592 client
5593 .request(proto::RunGitHook {
5594 project_id: project_id.0,
5595 repository_id: id.to_proto(),
5596 hook: hook.to_proto(),
5597 })
5598 .await?;
5599
5600 Ok(())
5601 }
5602 }
5603 },
5604 )
5605 }
5606
    /// Creates a commit with `message`, optionally overriding author
    /// `name_and_email`, after running the pre-commit hook.
    ///
    /// The hook is kicked off before the commit job is queued; the commit job
    /// awaits its result first and aborts if the hook fails. For remote
    /// projects the askpass delegate is registered under a fresh id so
    /// credential prompts can round-trip over RPC.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Wait for the pre-commit hook; a failure (or dropped channel)
            // aborts the commit.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate when the request completes,
                    // regardless of outcome.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5662
    /// Fetches from the remote described by `fetch_options`.
    ///
    /// For remote projects the askpass delegate is registered under a fresh
    /// id so credential prompts can round-trip over RPC, and deregistered
    /// when the request completes.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate on completion, success or not.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5704
5705 pub fn push(
5706 &mut self,
5707 branch: SharedString,
5708 remote_branch: SharedString,
5709 remote: SharedString,
5710 options: Option<PushOptions>,
5711 askpass: AskPassDelegate,
5712 cx: &mut Context<Self>,
5713 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5714 let askpass_delegates = self.askpass_delegates.clone();
5715 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5716 let id = self.id;
5717
5718 let args = options
5719 .map(|option| match option {
5720 PushOptions::SetUpstream => " --set-upstream",
5721 PushOptions::Force => " --force-with-lease",
5722 })
5723 .unwrap_or("");
5724
5725 let updates_tx = self
5726 .git_store()
5727 .and_then(|git_store| match &git_store.read(cx).state {
5728 GitStoreState::Local { downstream, .. } => downstream
5729 .as_ref()
5730 .map(|downstream| downstream.updates_tx.clone()),
5731 _ => None,
5732 });
5733
5734 let this = cx.weak_entity();
5735 self.send_job(
5736 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5737 move |git_repo, mut cx| async move {
5738 match git_repo {
5739 RepositoryState::Local(LocalRepositoryState {
5740 backend,
5741 environment,
5742 ..
5743 }) => {
5744 let result = backend
5745 .push(
5746 branch.to_string(),
5747 remote_branch.to_string(),
5748 remote.to_string(),
5749 options,
5750 askpass,
5751 environment.clone(),
5752 cx.clone(),
5753 )
5754 .await;
5755 // TODO would be nice to not have to do this manually
5756 if result.is_ok() {
5757 let branches = backend.branches().await?;
5758 let branch = branches.into_iter().find(|branch| branch.is_head);
5759 log::info!("head branch after scan is {branch:?}");
5760 let snapshot = this.update(&mut cx, |this, cx| {
5761 this.snapshot.branch = branch;
5762 cx.emit(RepositoryEvent::HeadChanged);
5763 this.snapshot.clone()
5764 })?;
5765 if let Some(updates_tx) = updates_tx {
5766 updates_tx
5767 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5768 .ok();
5769 }
5770 }
5771 result
5772 }
5773 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5774 askpass_delegates.lock().insert(askpass_id, askpass);
5775 let _defer = util::defer(|| {
5776 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5777 debug_assert!(askpass_delegate.is_some());
5778 });
5779 let response = client
5780 .request(proto::Push {
5781 project_id: project_id.0,
5782 repository_id: id.to_proto(),
5783 askpass_id,
5784 branch_name: branch.to_string(),
5785 remote_branch_name: remote_branch.to_string(),
5786 remote_name: remote.to_string(),
5787 options: options.map(|options| match options {
5788 PushOptions::Force => proto::push::PushOptions::Force,
5789 PushOptions::SetUpstream => {
5790 proto::push::PushOptions::SetUpstream
5791 }
5792 }
5793 as i32),
5794 })
5795 .await?;
5796
5797 Ok(RemoteCommandOutput {
5798 stdout: response.stdout,
5799 stderr: response.stderr,
5800 })
5801 }
5802 }
5803 },
5804 )
5805 }
5806
5807 pub fn pull(
5808 &mut self,
5809 branch: Option<SharedString>,
5810 remote: SharedString,
5811 rebase: bool,
5812 askpass: AskPassDelegate,
5813 _cx: &mut App,
5814 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5815 let askpass_delegates = self.askpass_delegates.clone();
5816 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5817 let id = self.id;
5818
5819 let mut status = "git pull".to_string();
5820 if rebase {
5821 status.push_str(" --rebase");
5822 }
5823 status.push_str(&format!(" {}", remote));
5824 if let Some(b) = &branch {
5825 status.push_str(&format!(" {}", b));
5826 }
5827
5828 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5829 match git_repo {
5830 RepositoryState::Local(LocalRepositoryState {
5831 backend,
5832 environment,
5833 ..
5834 }) => {
5835 backend
5836 .pull(
5837 branch.as_ref().map(|b| b.to_string()),
5838 remote.to_string(),
5839 rebase,
5840 askpass,
5841 environment.clone(),
5842 cx,
5843 )
5844 .await
5845 }
5846 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5847 askpass_delegates.lock().insert(askpass_id, askpass);
5848 let _defer = util::defer(|| {
5849 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5850 debug_assert!(askpass_delegate.is_some());
5851 });
5852 let response = client
5853 .request(proto::Pull {
5854 project_id: project_id.0,
5855 repository_id: id.to_proto(),
5856 askpass_id,
5857 rebase,
5858 branch_name: branch.as_ref().map(|b| b.to_string()),
5859 remote_name: remote.to_string(),
5860 })
5861 .await?;
5862
5863 Ok(RemoteCommandOutput {
5864 stdout: response.stdout,
5865 stderr: response.stderr,
5866 })
5867 }
5868 }
5869 })
5870 }
5871
    /// Writes `content` as the index entry for `path` (`None` removes the
    /// entry), keyed so that writes to the same path are serialized with
    /// other index writes.
    ///
    /// When `hunk_staging_operation_count` is provided, it is recorded on the
    /// path's diff state after a successful write so hunk staging can tell
    /// which operations have actually hit the index.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit from the working copy;
                        // missing files or metadata errors fall back to
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    // Record the count on the diff state, if the buffer and
                    // its diff state still exist (`?` inside the closure
                    // bails out quietly when they don't).
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5950
5951 pub fn create_remote(
5952 &mut self,
5953 remote_name: String,
5954 remote_url: String,
5955 ) -> oneshot::Receiver<Result<()>> {
5956 let id = self.id;
5957 self.send_job(
5958 Some(format!("git remote add {remote_name} {remote_url}").into()),
5959 move |repo, _cx| async move {
5960 match repo {
5961 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5962 backend.create_remote(remote_name, remote_url).await
5963 }
5964 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5965 client
5966 .request(proto::GitCreateRemote {
5967 project_id: project_id.0,
5968 repository_id: id.to_proto(),
5969 remote_name,
5970 remote_url,
5971 })
5972 .await?;
5973
5974 Ok(())
5975 }
5976 }
5977 },
5978 )
5979 }
5980
5981 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5982 let id = self.id;
5983 self.send_job(
5984 Some(format!("git remove remote {remote_name}").into()),
5985 move |repo, _cx| async move {
5986 match repo {
5987 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5988 backend.remove_remote(remote_name).await
5989 }
5990 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5991 client
5992 .request(proto::GitRemoveRemote {
5993 project_id: project_id.0,
5994 repository_id: id.to_proto(),
5995 remote_name,
5996 })
5997 .await?;
5998
5999 Ok(())
6000 }
6001 }
6002 },
6003 )
6004 }
6005
6006 pub fn get_remotes(
6007 &mut self,
6008 branch_name: Option<String>,
6009 is_push: bool,
6010 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
6011 let id = self.id;
6012 self.send_job(None, move |repo, _cx| async move {
6013 match repo {
6014 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6015 let remote = if let Some(branch_name) = branch_name {
6016 if is_push {
6017 backend.get_push_remote(branch_name).await?
6018 } else {
6019 backend.get_branch_remote(branch_name).await?
6020 }
6021 } else {
6022 None
6023 };
6024
6025 match remote {
6026 Some(remote) => Ok(vec![remote]),
6027 None => backend.get_all_remotes().await,
6028 }
6029 }
6030 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6031 let response = client
6032 .request(proto::GetRemotes {
6033 project_id: project_id.0,
6034 repository_id: id.to_proto(),
6035 branch_name,
6036 is_push,
6037 })
6038 .await?;
6039
6040 let remotes = response
6041 .remotes
6042 .into_iter()
6043 .map(|remotes| Remote {
6044 name: remotes.name.into(),
6045 })
6046 .collect();
6047
6048 Ok(remotes)
6049 }
6050 }
6051 })
6052 }
6053
6054 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
6055 let id = self.id;
6056 self.send_job(None, move |repo, _| async move {
6057 match repo {
6058 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6059 backend.branches().await
6060 }
6061 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6062 let response = client
6063 .request(proto::GitGetBranches {
6064 project_id: project_id.0,
6065 repository_id: id.to_proto(),
6066 })
6067 .await?;
6068
6069 let branches = response
6070 .branches
6071 .into_iter()
6072 .map(|branch| proto_to_branch(&branch))
6073 .collect();
6074
6075 Ok(branches)
6076 }
6077 }
6078 })
6079 }
6080
6081 /// If this is a linked worktree (*NOT* the main checkout of a repository),
6082 /// returns the pathed for the linked worktree.
6083 ///
6084 /// Returns None if this is the main checkout.
6085 pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
6086 if self.work_directory_abs_path != self.original_repo_abs_path {
6087 Some(&self.work_directory_abs_path)
6088 } else {
6089 None
6090 }
6091 }
6092
6093 pub fn path_for_new_linked_worktree(
6094 &self,
6095 branch_name: &str,
6096 worktree_directory_setting: &str,
6097 ) -> Result<PathBuf> {
6098 let original_repo = self.original_repo_abs_path.clone();
6099 let project_name = original_repo
6100 .file_name()
6101 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
6102 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
6103 Ok(directory.join(branch_name).join(project_name))
6104 }
6105
6106 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
6107 let id = self.id;
6108 self.send_job(None, move |repo, _| async move {
6109 match repo {
6110 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6111 backend.worktrees().await
6112 }
6113 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6114 let response = client
6115 .request(proto::GitGetWorktrees {
6116 project_id: project_id.0,
6117 repository_id: id.to_proto(),
6118 })
6119 .await?;
6120
6121 let worktrees = response
6122 .worktrees
6123 .into_iter()
6124 .map(|worktree| proto_to_worktree(&worktree))
6125 .collect();
6126
6127 Ok(worktrees)
6128 }
6129 }
6130 })
6131 }
6132
    /// Creates a new linked worktree at `path`, checked out according to
    /// `target` (existing branch, new branch, or detached HEAD).
    ///
    /// For remote projects the target is flattened into the
    /// `(name, commit, use_existing_branch)` fields of the RPC request.
    pub fn create_worktree(
        &mut self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let job_description = match target.branch_name() {
            Some(branch_name) => format!("git worktree add: {branch_name}"),
            None => "git worktree add (detached)".to_string(),
        };
        self.send_job(Some(job_description.into()), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_worktree(target, path).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Flatten the target enum into the proto request's fields.
                    let (name, commit, use_existing_branch) = match target {
                        CreateWorktreeTarget::ExistingBranch { branch_name } => {
                            (Some(branch_name), None, true)
                        }
                        CreateWorktreeTarget::NewBranch {
                            branch_name,
                            base_sha,
                        } => (Some(branch_name), base_sha, false),
                        CreateWorktreeTarget::Detached { base_sha } => (None, base_sha, false),
                    };

                    client
                        .request(proto::GitCreateWorktree {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            name: name.unwrap_or_default(),
                            directory: path.to_string_lossy().to_string(),
                            commit,
                            use_existing_branch,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6176
6177 pub fn create_worktree_detached(
6178 &mut self,
6179 path: PathBuf,
6180 commit: String,
6181 ) -> oneshot::Receiver<Result<()>> {
6182 self.create_worktree(
6183 CreateWorktreeTarget::Detached {
6184 base_sha: Some(commit),
6185 },
6186 path,
6187 )
6188 }
6189
6190 pub fn checkout_branch_in_worktree(
6191 &mut self,
6192 branch_name: String,
6193 worktree_path: PathBuf,
6194 create: bool,
6195 ) -> oneshot::Receiver<Result<()>> {
6196 let description = if create {
6197 format!("git checkout -b {branch_name}")
6198 } else {
6199 format!("git checkout {branch_name}")
6200 };
6201 self.send_job(Some(description.into()), move |repo, _cx| async move {
6202 match repo {
6203 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6204 backend
6205 .checkout_branch_in_worktree(branch_name, worktree_path, create)
6206 .await
6207 }
6208 RepositoryState::Remote(_) => {
6209 log::warn!("checkout_branch_in_worktree not supported for remote repositories");
6210 Ok(())
6211 }
6212 }
6213 })
6214 }
6215
6216 pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
6217 let id = self.id;
6218 self.send_job(None, move |repo, _cx| async move {
6219 match repo {
6220 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6221 Ok(backend.head_sha().await)
6222 }
6223 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6224 let response = client
6225 .request(proto::GitGetHeadSha {
6226 project_id: project_id.0,
6227 repository_id: id.to_proto(),
6228 })
6229 .await?;
6230
6231 Ok(response.sha)
6232 }
6233 }
6234 })
6235 }
6236
6237 fn edit_ref(
6238 &mut self,
6239 ref_name: String,
6240 commit: Option<String>,
6241 ) -> oneshot::Receiver<Result<()>> {
6242 let id = self.id;
6243 self.send_job(None, move |repo, _cx| async move {
6244 match repo {
6245 RepositoryState::Local(LocalRepositoryState { backend, .. }) => match commit {
6246 Some(commit) => backend.update_ref(ref_name, commit).await,
6247 None => backend.delete_ref(ref_name).await,
6248 },
6249 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6250 let action = match commit {
6251 Some(sha) => proto::git_edit_ref::Action::UpdateToCommit(sha),
6252 None => {
6253 proto::git_edit_ref::Action::Delete(proto::git_edit_ref::DeleteRef {})
6254 }
6255 };
6256 client
6257 .request(proto::GitEditRef {
6258 project_id: project_id.0,
6259 repository_id: id.to_proto(),
6260 ref_name,
6261 action: Some(action),
6262 })
6263 .await?;
6264 Ok(())
6265 }
6266 }
6267 })
6268 }
6269
    /// Points `ref_name` at `commit`, creating or updating the ref.
    pub fn update_ref(
        &mut self,
        ref_name: String,
        commit: String,
    ) -> oneshot::Receiver<Result<()>> {
        self.edit_ref(ref_name, Some(commit))
    }
6277
    /// Deletes the ref named `ref_name`.
    pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
        self.edit_ref(ref_name, None)
    }
6281
6282 pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
6283 let id = self.id;
6284 self.send_job(None, move |repo, _cx| async move {
6285 match repo {
6286 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6287 backend.repair_worktrees().await
6288 }
6289 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6290 client
6291 .request(proto::GitRepairWorktrees {
6292 project_id: project_id.0,
6293 repository_id: id.to_proto(),
6294 })
6295 .await?;
6296 Ok(())
6297 }
6298 }
6299 })
6300 }
6301
6302 pub fn create_archive_checkpoint(&mut self) -> oneshot::Receiver<Result<(String, String)>> {
6303 let id = self.id;
6304 self.send_job(None, move |repo, _cx| async move {
6305 match repo {
6306 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6307 backend.create_archive_checkpoint().await
6308 }
6309 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6310 let response = client
6311 .request(proto::GitCreateArchiveCheckpoint {
6312 project_id: project_id.0,
6313 repository_id: id.to_proto(),
6314 })
6315 .await?;
6316 Ok((response.staged_commit_sha, response.unstaged_commit_sha))
6317 }
6318 }
6319 })
6320 }
6321
6322 pub fn restore_archive_checkpoint(
6323 &mut self,
6324 staged_sha: String,
6325 unstaged_sha: String,
6326 ) -> oneshot::Receiver<Result<()>> {
6327 let id = self.id;
6328 self.send_job(None, move |repo, _cx| async move {
6329 match repo {
6330 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6331 backend
6332 .restore_archive_checkpoint(staged_sha, unstaged_sha)
6333 .await
6334 }
6335 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6336 client
6337 .request(proto::GitRestoreArchiveCheckpoint {
6338 project_id: project_id.0,
6339 repository_id: id.to_proto(),
6340 staged_commit_sha: staged_sha,
6341 unstaged_commit_sha: unstaged_sha,
6342 })
6343 .await?;
6344 Ok(())
6345 }
6346 }
6347 })
6348 }
6349
    /// Removes the linked worktree at `path` (`git worktree remove`),
    /// optionally `--force`.
    ///
    /// On a forced local removal the worktree directory is deleted up front
    /// (see the inline comment for why), and after a successful removal any
    /// empty ancestor directories under the managed worktrees base are
    /// cleaned up. For remote projects the whole operation runs on the
    /// remote machine via RPC.
    pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let original_repo_abs_path = self.snapshot.original_repo_abs_path.clone();
        self.send_job(
            Some(format!("git worktree remove: {}", path.display()).into()),
            move |repo, cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, fs, .. }) => {
                        // When forcing, delete the worktree directory ourselves before
                        // invoking git. `git worktree remove` can remove the admin
                        // metadata in `.git/worktrees/<name>` but fail to delete the
                        // working directory (it continues past directory-removal errors),
                        // leaving an orphaned folder on disk. Deleting first guarantees
                        // the directory is gone, and `git worktree remove --force`
                        // tolerates a missing working tree while cleaning up the admin
                        // entry. We keep this inside the `Local` arm so that for remote
                        // projects the deletion runs on the remote machine (where the
                        // `GitRemoveWorktree` RPC is handled against the local repo on
                        // the headless server) using its own filesystem.
                        //
                        // After a successful removal, also delete any empty ancestor
                        // directories between the worktree path and the configured
                        // base directory used when creating linked worktrees.
                        //
                        // Non-force removals are left untouched before git runs:
                        // `git worktree remove` must see the dirty working tree to
                        // refuse the operation.
                        if force {
                            fs.remove_dir(
                                &path,
                                RemoveOptions {
                                    recursive: true,
                                    ignore_if_not_exists: true,
                                },
                            )
                            .await
                            .with_context(|| {
                                format!("failed to delete worktree directory '{}'", path.display())
                            })?;
                        }

                        backend.remove_worktree(path.clone(), force).await?;

                        let managed_worktree_base = cx.update(|cx| {
                            let setting = &ProjectSettings::get_global(cx).git.worktree_directory;
                            worktrees_directory_for_repo(&original_repo_abs_path, setting).log_err()
                        });

                        if let Some(managed_worktree_base) = managed_worktree_base {
                            remove_empty_managed_worktree_ancestors(
                                fs.as_ref(),
                                &path,
                                &managed_worktree_base,
                            )
                            .await;
                        }

                        Ok(())
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRemoveWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_string_lossy().to_string(),
                                force,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6425
6426 pub fn rename_worktree(
6427 &mut self,
6428 old_path: PathBuf,
6429 new_path: PathBuf,
6430 ) -> oneshot::Receiver<Result<()>> {
6431 let id = self.id;
6432 self.send_job(
6433 Some(format!("git worktree move: {}", old_path.display()).into()),
6434 move |repo, _cx| async move {
6435 match repo {
6436 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6437 backend.rename_worktree(old_path, new_path).await
6438 }
6439 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6440 client
6441 .request(proto::GitRenameWorktree {
6442 project_id: project_id.0,
6443 repository_id: id.to_proto(),
6444 old_path: old_path.to_string_lossy().to_string(),
6445 new_path: new_path.to_string_lossy().to_string(),
6446 })
6447 .await?;
6448
6449 Ok(())
6450 }
6451 }
6452 },
6453 )
6454 }
6455
6456 pub fn default_branch(
6457 &mut self,
6458 include_remote_name: bool,
6459 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
6460 let id = self.id;
6461 self.send_job(None, move |repo, _| async move {
6462 match repo {
6463 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6464 backend.default_branch(include_remote_name).await
6465 }
6466 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6467 let response = client
6468 .request(proto::GetDefaultBranch {
6469 project_id: project_id.0,
6470 repository_id: id.to_proto(),
6471 })
6472 .await?;
6473
6474 anyhow::Ok(response.branch.map(SharedString::from))
6475 }
6476 }
6477 })
6478 }
6479
    /// Computes a tree-level diff of the requested kind. Locally this asks
    /// the backend directly; for remote projects the diff is fetched over
    /// RPC and decoded from protobuf. Entries whose path or object id fails
    /// to parse are dropped (logged via `log_err`) rather than failing the
    /// whole diff.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Decode each proto entry; `Modified` and `Deleted`
                    // require the old object id to be present and valid.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6539
6540 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6541 let id = self.id;
6542 self.send_job(None, move |repo, _cx| async move {
6543 match repo {
6544 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6545 backend.diff(diff_type).await
6546 }
6547 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6548 let (proto_diff_type, merge_base_ref) = match &diff_type {
6549 DiffType::HeadToIndex => {
6550 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6551 }
6552 DiffType::HeadToWorktree => {
6553 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6554 }
6555 DiffType::MergeBase { base_ref } => (
6556 proto::git_diff::DiffType::MergeBase.into(),
6557 Some(base_ref.to_string()),
6558 ),
6559 };
6560 let response = client
6561 .request(proto::GitDiff {
6562 project_id: project_id.0,
6563 repository_id: id.to_proto(),
6564 diff_type: proto_diff_type,
6565 merge_base_ref,
6566 })
6567 .await?;
6568
6569 Ok(response.diff)
6570 }
6571 }
6572 })
6573 }
6574
    /// Creates and switches to `branch_name` (`git switch -c`), optionally
    /// starting from `base_branch` instead of the current HEAD.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `base_branch` is not forwarded here, so a
                    // remote create always branches from the current HEAD even
                    // when a base was requested — confirm whether
                    // `proto::GitCreateBranch` can carry a base ref.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6605
6606 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6607 let id = self.id;
6608 self.send_job(
6609 Some(format!("git switch {branch_name}").into()),
6610 move |repo, _cx| async move {
6611 match repo {
6612 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6613 backend.change_branch(branch_name).await
6614 }
6615 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6616 client
6617 .request(proto::GitChangeBranch {
6618 project_id: project_id.0,
6619 repository_id: id.to_proto(),
6620 branch_name,
6621 })
6622 .await?;
6623
6624 Ok(())
6625 }
6626 }
6627 },
6628 )
6629 }
6630
6631 pub fn delete_branch(
6632 &mut self,
6633 is_remote: bool,
6634 branch_name: String,
6635 ) -> oneshot::Receiver<Result<()>> {
6636 let id = self.id;
6637 self.send_job(
6638 Some(
6639 format!(
6640 "git branch {} {}",
6641 if is_remote { "-dr" } else { "-d" },
6642 branch_name
6643 )
6644 .into(),
6645 ),
6646 move |repo, _cx| async move {
6647 match repo {
6648 RepositoryState::Local(state) => {
6649 state.backend.delete_branch(is_remote, branch_name).await
6650 }
6651 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6652 client
6653 .request(proto::GitDeleteBranch {
6654 project_id: project_id.0,
6655 repository_id: id.to_proto(),
6656 is_remote,
6657 branch_name,
6658 })
6659 .await?;
6660
6661 Ok(())
6662 }
6663 }
6664 },
6665 )
6666 }
6667
6668 pub fn rename_branch(
6669 &mut self,
6670 branch: String,
6671 new_name: String,
6672 ) -> oneshot::Receiver<Result<()>> {
6673 let id = self.id;
6674 self.send_job(
6675 Some(format!("git branch -m {branch} {new_name}").into()),
6676 move |repo, _cx| async move {
6677 match repo {
6678 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6679 backend.rename_branch(branch, new_name).await
6680 }
6681 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6682 client
6683 .request(proto::GitRenameBranch {
6684 project_id: project_id.0,
6685 repository_id: id.to_proto(),
6686 branch,
6687 new_name,
6688 })
6689 .await?;
6690
6691 Ok(())
6692 }
6693 }
6694 },
6695 )
6696 }
6697
6698 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6699 let id = self.id;
6700 self.send_job(None, move |repo, _cx| async move {
6701 match repo {
6702 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6703 backend.check_for_pushed_commit().await
6704 }
6705 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6706 let response = client
6707 .request(proto::CheckForPushedCommits {
6708 project_id: project_id.0,
6709 repository_id: id.to_proto(),
6710 })
6711 .await?;
6712
6713 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6714
6715 Ok(branches)
6716 }
6717 }
6718 })
6719 }
6720
6721 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6722 let id = self.id;
6723 self.send_job(None, move |repo, _cx| async move {
6724 match repo {
6725 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6726 backend.checkpoint().await
6727 }
6728 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6729 let response = client
6730 .request(proto::GitCreateCheckpoint {
6731 project_id: project_id.0,
6732 repository_id: id.to_proto(),
6733 })
6734 .await?;
6735
6736 Ok(GitRepositoryCheckpoint {
6737 commit_sha: Oid::from_bytes(&response.commit_sha)?,
6738 })
6739 }
6740 }
6741 })
6742 }
6743
6744 pub fn restore_checkpoint(
6745 &mut self,
6746 checkpoint: GitRepositoryCheckpoint,
6747 ) -> oneshot::Receiver<Result<()>> {
6748 let id = self.id;
6749 self.send_job(None, move |repo, _cx| async move {
6750 match repo {
6751 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6752 backend.restore_checkpoint(checkpoint).await
6753 }
6754 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6755 client
6756 .request(proto::GitRestoreCheckpoint {
6757 project_id: project_id.0,
6758 repository_id: id.to_proto(),
6759 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6760 })
6761 .await?;
6762 Ok(())
6763 }
6764 }
6765 })
6766 }
6767
    /// Applies an `UpdateRepository` message received from the upstream
    /// project to this replica's snapshot, emitting the appropriate
    /// `RepositoryEvent`s for each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Branch and HEAD commit: emit `HeadChanged` when either differs
        // from what we currently hold, then adopt the new values.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to decode are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Apply status removals and insertions as a single batch of edits;
        // entries that fail to decode are dropped (logged via `log_err`).
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only adopt the upstream scan id once the final message of a batch
        // has arrived.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6851
6852 pub fn compare_checkpoints(
6853 &mut self,
6854 left: GitRepositoryCheckpoint,
6855 right: GitRepositoryCheckpoint,
6856 ) -> oneshot::Receiver<Result<bool>> {
6857 let id = self.id;
6858 self.send_job(None, move |repo, _cx| async move {
6859 match repo {
6860 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6861 backend.compare_checkpoints(left, right).await
6862 }
6863 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6864 let response = client
6865 .request(proto::GitCompareCheckpoints {
6866 project_id: project_id.0,
6867 repository_id: id.to_proto(),
6868 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6869 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6870 })
6871 .await?;
6872 Ok(response.equal)
6873 }
6874 }
6875 })
6876 }
6877
6878 pub fn diff_checkpoints(
6879 &mut self,
6880 base_checkpoint: GitRepositoryCheckpoint,
6881 target_checkpoint: GitRepositoryCheckpoint,
6882 ) -> oneshot::Receiver<Result<String>> {
6883 let id = self.id;
6884 self.send_job(None, move |repo, _cx| async move {
6885 match repo {
6886 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6887 backend
6888 .diff_checkpoints(base_checkpoint, target_checkpoint)
6889 .await
6890 }
6891 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6892 let response = client
6893 .request(proto::GitDiffCheckpoints {
6894 project_id: project_id.0,
6895 repository_id: id.to_proto(),
6896 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
6897 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
6898 })
6899 .await?;
6900 Ok(response.diff)
6901 }
6902 }
6903 })
6904 }
6905
    /// Compacts `pending_ops`, retaining only paths that still have at least
    /// one running op and dropping ops that have completed. Emits
    /// `PendingOpsChanged` when anything was dropped.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                // Keep only ops that are still running for this path; drop
                // the path's entry entirely when none remain.
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *pre-compaction* set (the
            // field is only replaced below) — confirm whether subscribers
            // expect the old or the new ops here.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6931
    /// Schedules a full git state rescan as a keyed job
    /// (`GitJobKey::ReloadGitState`); if another scan is queued behind this
    /// one, the worker's key-deduplication skips this one. On success, the
    /// fresh snapshot is forwarded to `updates_tx` (if provided) so
    /// downstream replicas receive it.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been released while this
                // job sat in the queue; nothing to do in that case.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // NOTE(review): the result of this update is discarded, so a
                // failure here silently skips pending-op cleanup.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6963
    /// Spawns the background worker loop that executes queued git jobs
    /// against a local repository, returning the sender used to enqueue
    /// jobs. Jobs run strictly one at a time in FIFO order, except that a
    /// keyed job is skipped when a newer job with the same key is queued
    /// behind it.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // accepting any work.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything already queued so keyed jobs can be
                // deduplicated against the full backlog below.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a newer one with the same key
                    // is still pending — only the latest instance runs.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
7009
    /// Remote counterpart of `spawn_local_git_worker`: runs queued git jobs
    /// against the upstream project over RPC, one at a time in FIFO order,
    /// with the same keyed-job deduplication. Unlike the local variant there
    /// is no backend initialization to await and no hosting-provider
    /// registration.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything already queued so keyed jobs can be
                // deduplicated against the full backlog below.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this keyed job if a newer one with the same key
                    // is still pending — only the latest instance runs.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
7045
7046 fn load_staged_text(
7047 &mut self,
7048 buffer_id: BufferId,
7049 repo_path: RepoPath,
7050 cx: &App,
7051 ) -> Task<Result<Option<String>>> {
7052 let rx = self.send_job(None, move |state, _| async move {
7053 match state {
7054 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7055 anyhow::Ok(backend.load_index_text(repo_path).await)
7056 }
7057 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7058 let response = client
7059 .request(proto::OpenUnstagedDiff {
7060 project_id: project_id.to_proto(),
7061 buffer_id: buffer_id.to_proto(),
7062 })
7063 .await?;
7064 Ok(response.staged_text)
7065 }
7066 }
7067 });
7068 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
7069 }
7070
7071 fn load_committed_text(
7072 &mut self,
7073 buffer_id: BufferId,
7074 repo_path: RepoPath,
7075 cx: &App,
7076 ) -> Task<Result<DiffBasesChange>> {
7077 let rx = self.send_job(None, move |state, _| async move {
7078 match state {
7079 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7080 let committed_text = backend.load_committed_text(repo_path.clone()).await;
7081 let staged_text = backend.load_index_text(repo_path).await;
7082 let diff_bases_change = if committed_text == staged_text {
7083 DiffBasesChange::SetBoth(committed_text)
7084 } else {
7085 DiffBasesChange::SetEach {
7086 index: staged_text,
7087 head: committed_text,
7088 }
7089 };
7090 anyhow::Ok(diff_bases_change)
7091 }
7092 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7093 use proto::open_uncommitted_diff_response::Mode;
7094
7095 let response = client
7096 .request(proto::OpenUncommittedDiff {
7097 project_id: project_id.to_proto(),
7098 buffer_id: buffer_id.to_proto(),
7099 })
7100 .await?;
7101 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
7102 let bases = match mode {
7103 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
7104 Mode::IndexAndHead => DiffBasesChange::SetEach {
7105 head: response.committed_text,
7106 index: response.staged_text,
7107 },
7108 };
7109 Ok(bases)
7110 }
7111 }
7112 });
7113
7114 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
7115 }
7116
7117 pub fn load_commit_template_text(
7118 &mut self,
7119 ) -> oneshot::Receiver<Result<Option<GitCommitTemplate>>> {
7120 self.send_job(None, move |git_repo, _cx| async move {
7121 match git_repo {
7122 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7123 backend.load_commit_template().await
7124 }
7125 RepositoryState::Remote(_) => Ok(None),
7126 }
7127 })
7128 }
7129
7130 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
7131 let repository_id = self.snapshot.id;
7132 let rx = self.send_job(None, move |state, _| async move {
7133 match state {
7134 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7135 backend.load_blob_content(oid).await
7136 }
7137 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
7138 let response = client
7139 .request(proto::GetBlobContent {
7140 project_id: project_id.to_proto(),
7141 repository_id: repository_id.0,
7142 oid: oid.to_string(),
7143 })
7144 .await?;
7145 Ok(response.content)
7146 }
7147 }
7148 });
7149 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
7150 }
7151
    /// Enqueues a status refresh for the given repo paths. Paths accumulate
    /// in `paths_needing_status_update` and are drained by a keyed job
    /// (`GitJobKey::RefreshStatuses`), so bursts of change notifications
    /// collapse into a single scan.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Snapshot current state and take ownership of the queued
                // paths, leaving an empty list for any later refresh job.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        // Run the status and diff-stat queries concurrently;
                        // without a HEAD commit there is nothing to diff
                        // against, so an empty stat result is substituted.
                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Insert an edit only when a path's status or diff
                        // stat actually differs from the previous snapshot.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Paths that no longer appear in the status output but
                        // previously had an entry get removal edits.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    // Forward the refreshed snapshot to downstream replicas.
                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
7264
7265 /// currently running git command and when it started
7266 pub fn current_job(&self) -> Option<JobInfo> {
7267 self.active_jobs.values().next().cloned()
7268 }
7269
7270 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
7271 self.send_job(None, |_, _| async {})
7272 }
7273
    /// Runs `f` while tracking it as a pending git op (with `git_status`)
    /// for each of `paths`. When `f` resolves, every op created here is
    /// marked finished, skipped (on cancellation), or errored.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A `Canceled` error means the job's channel was dropped, not
            // that the work failed: record it as skipped and report success.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            // Write the final status back onto each op we registered above.
            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7313
    /// Registers a new `Running` pending op (with `git_status`) for each
    /// path and returns the `(id, path)` pairs so the caller can later mark
    /// them completed. Ids are allocated per path, one above that path's
    /// current maximum.
    fn new_pending_ops_for_paths(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
    ) -> Vec<(PendingOpId, RepoPath)> {
        let mut edits = Vec::with_capacity(paths.len());
        let mut ids = Vec::with_capacity(paths.len());
        for path in paths {
            // Extend the path's existing op list, or start a fresh one.
            let mut ops = self
                .pending_ops
                .get(&PathKey(path.as_ref().clone()), ())
                .cloned()
                .unwrap_or_else(|| PendingOps::new(&path));
            let id = ops.max_id() + 1;
            ops.ops.push(PendingOp {
                id,
                git_status,
                job_status: pending_op::JobStatus::Running,
            });
            edits.push(sum_tree::Edit::Insert(ops));
            ids.push((id, path));
        }
        self.pending_ops.edit(edits, ());
        ids
    }
7339 pub fn default_remote_url(&self) -> Option<String> {
7340 self.remote_upstream_url
7341 .clone()
7342 .or(self.remote_origin_url.clone())
7343 }
7344}
7345
7346/// If `path` is a git linked worktree checkout, resolves it to the main
7347/// repository's working directory path. Returns `None` if `path` is a normal
7348/// repository, not a git repo, or if resolution fails.
7349///
7350/// Resolution works by:
7351/// 1. Reading the `.git` file to get the `gitdir:` pointer
7352/// 2. Following that to the worktree-specific git directory
7353/// 3. Reading the `commondir` file to find the shared `.git` directory
7354/// 4. Deriving the main repo's working directory from the common dir
7355pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7356 let dot_git = path.join(".git");
7357 let metadata = fs.metadata(&dot_git).await.ok()??;
7358 if metadata.is_dir {
7359 return None; // Normal repo, not a linked worktree
7360 }
7361 // It's a .git file — parse the gitdir: pointer
7362 let content = fs.load(&dot_git).await.ok()?;
7363 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7364 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7365 // Read commondir to find the main .git directory
7366 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7367 let common_dir = fs
7368 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7369 .await
7370 .ok()?;
7371 git::repository::original_repo_path_from_common_dir(&common_dir)
7372}
7373
/// Validates that the resolved worktree directory is acceptable:
/// - The setting must not be an absolute path.
/// - The resolved path must be either a subdirectory of the working
///   directory or a subdirectory of its parent (i.e., a sibling).
///
/// Returns `Ok(resolved_path)` or an error with a user-facing message.
pub fn worktrees_directory_for_repo(
    original_repo_abs_path: &Path,
    worktree_directory_setting: &str,
) -> Result<PathBuf> {
    // Check the original setting before trimming, since a path like "///"
    // is absolute but becomes "" after stripping trailing separators.
    // Also check for leading `/` or `\` explicitly, because on Windows
    // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
    // would slip through even though it's clearly not a relative path.
    if Path::new(worktree_directory_setting).is_absolute()
        || worktree_directory_setting.starts_with('/')
        || worktree_directory_setting.starts_with('\\')
    {
        anyhow::bail!(
            "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
        );
    }

    if worktree_directory_setting.is_empty() {
        anyhow::bail!("git.worktree_directory must not be empty");
    }

    let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
    if trimmed == ".." {
        anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
    }

    let joined = original_repo_abs_path.join(trimmed);
    let resolved = util::normalize_path(&joined);
    // If normalization escaped the repo directory (e.g. "../worktrees"),
    // namespace the result by the repo's directory name so sibling layouts
    // from different repositories don't collide.
    let resolved = if resolved.starts_with(original_repo_abs_path) {
        resolved
    } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
        resolved.join(repo_dir_name)
    } else {
        resolved
    };

    let parent = original_repo_abs_path
        .parent()
        .unwrap_or(original_repo_abs_path);

    // Final containment check: the resolved directory must live under the
    // repo's parent (which covers both "inside the repo" and "sibling").
    if !resolved.starts_with(parent) {
        anyhow::bail!(
            "git.worktree_directory resolved to {resolved:?}, which is outside \
            the project root and its parent directory. It must resolve to a \
            subdirectory of {original_repo_abs_path:?} or a sibling of it."
        );
    }

    Ok(resolved)
}
7431
/// Walks upward from `child_path` toward `base_path`, removing each empty
/// intermediate directory. Stops at `base_path` itself, at the first
/// directory outside `base_path`, or at the first directory that cannot be
/// removed (e.g. because it is not empty — removal is non-recursive).
async fn remove_empty_managed_worktree_ancestors(fs: &dyn Fs, child_path: &Path, base_path: &Path) {
    let mut current = child_path;
    while let Some(parent) = current.parent() {
        // Never delete the managed base directory itself, and never step
        // outside of it.
        if parent == base_path {
            break;
        }
        if !parent.starts_with(base_path) {
            break;
        }

        // Non-recursive removal: succeeds only when the directory is empty.
        let result = fs
            .remove_dir(
                parent,
                RemoveOptions {
                    recursive: false,
                    ignore_if_not_exists: true,
                },
            )
            .await;

        match result {
            Ok(()) => {
                log::info!(
                    "Removed empty managed worktree directory: {}",
                    parent.display()
                );
            }
            Err(error) => {
                // A non-empty (or otherwise unremovable) ancestor ends the
                // walk; this is the expected terminating condition.
                log::debug!(
                    "Stopped removing managed worktree parent directories at {}: {error}",
                    parent.display()
                );
                break;
            }
        }

        current = parent;
    }
}
7471
7472/// Returns a short name for a linked worktree suitable for UI display
7473///
7474/// Uses the main worktree path to come up with a short name that disambiguates
7475/// the linked worktree from the main worktree.
7476pub fn linked_worktree_short_name(
7477 main_worktree_path: &Path,
7478 linked_worktree_path: &Path,
7479) -> Option<SharedString> {
7480 if main_worktree_path == linked_worktree_path {
7481 return None;
7482 }
7483
7484 let project_name = main_worktree_path.file_name()?.to_str()?;
7485 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7486 let name = if directory_name != project_name {
7487 directory_name.to_string()
7488 } else {
7489 linked_worktree_path
7490 .parent()?
7491 .file_name()?
7492 .to_str()?
7493 .to_string()
7494 };
7495 Some(name.into())
7496}
7497
/// Builds a permalink to the upstream source of a file that lives in the
/// local Cargo registry cache.
///
/// Published crates ship a `.cargo_vcs_info.json` recording the git SHA they
/// were packaged from; combined with the `package.repository` field of the
/// crate's `Cargo.toml`, this pinpoints the exact upstream file and revision.
///
/// Returns an error if no `.cargo_vcs_info.json` exists in any parent
/// directory of `path`, if either metadata file fails to parse, or if the
/// repository URL doesn't match a known git hosting provider.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal mirrors of the JSON/TOML structures; only the fields we read.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file (skip(1) starts at its parent) to find the crate
    // root, i.e. the first directory containing `.cargo_vcs_info.json`.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // `path_in_vcs` is the crate's location within the upstream repository;
    // `dir` is an ancestor of `path` by construction, so strip_prefix is safe.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7548
7549fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7550 let Some(blame) = blame else {
7551 return proto::BlameBufferResponse {
7552 blame_response: None,
7553 };
7554 };
7555
7556 let entries = blame
7557 .entries
7558 .into_iter()
7559 .map(|entry| proto::BlameEntry {
7560 sha: entry.sha.as_bytes().into(),
7561 start_line: entry.range.start,
7562 end_line: entry.range.end,
7563 original_line_number: entry.original_line_number,
7564 author: entry.author,
7565 author_mail: entry.author_mail,
7566 author_time: entry.author_time,
7567 author_tz: entry.author_tz,
7568 committer: entry.committer_name,
7569 committer_mail: entry.committer_email,
7570 committer_time: entry.committer_time,
7571 committer_tz: entry.committer_tz,
7572 summary: entry.summary,
7573 previous: entry.previous,
7574 filename: entry.filename,
7575 })
7576 .collect::<Vec<_>>();
7577
7578 let messages = blame
7579 .messages
7580 .into_iter()
7581 .map(|(oid, message)| proto::CommitMessage {
7582 oid: oid.as_bytes().into(),
7583 message,
7584 })
7585 .collect::<Vec<_>>();
7586
7587 proto::BlameBufferResponse {
7588 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7589 }
7590}
7591
7592fn deserialize_blame_buffer_response(
7593 response: proto::BlameBufferResponse,
7594) -> Option<git::blame::Blame> {
7595 let response = response.blame_response?;
7596 let entries = response
7597 .entries
7598 .into_iter()
7599 .filter_map(|entry| {
7600 Some(git::blame::BlameEntry {
7601 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7602 range: entry.start_line..entry.end_line,
7603 original_line_number: entry.original_line_number,
7604 committer_name: entry.committer,
7605 committer_time: entry.committer_time,
7606 committer_tz: entry.committer_tz,
7607 committer_email: entry.committer_mail,
7608 author: entry.author,
7609 author_mail: entry.author_mail,
7610 author_time: entry.author_time,
7611 author_tz: entry.author_tz,
7612 summary: entry.summary,
7613 previous: entry.previous,
7614 filename: entry.filename,
7615 })
7616 })
7617 .collect::<Vec<_>>();
7618
7619 let messages = response
7620 .messages
7621 .into_iter()
7622 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7623 .collect::<HashMap<_, _>>();
7624
7625 Some(Blame { entries, messages })
7626}
7627
7628fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7629 proto::Branch {
7630 is_head: branch.is_head,
7631 ref_name: branch.ref_name.to_string(),
7632 unix_timestamp: branch
7633 .most_recent_commit
7634 .as_ref()
7635 .map(|commit| commit.commit_timestamp as u64),
7636 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7637 ref_name: upstream.ref_name.to_string(),
7638 tracking: upstream
7639 .tracking
7640 .status()
7641 .map(|upstream| proto::UpstreamTracking {
7642 ahead: upstream.ahead as u64,
7643 behind: upstream.behind as u64,
7644 }),
7645 }),
7646 most_recent_commit: branch
7647 .most_recent_commit
7648 .as_ref()
7649 .map(|commit| proto::CommitSummary {
7650 sha: commit.sha.to_string(),
7651 subject: commit.subject.to_string(),
7652 commit_timestamp: commit.commit_timestamp,
7653 author_name: commit.author_name.to_string(),
7654 }),
7655 }
7656}
7657
7658fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7659 proto::Worktree {
7660 path: worktree.path.to_string_lossy().to_string(),
7661 ref_name: worktree
7662 .ref_name
7663 .as_ref()
7664 .map(|s| s.to_string())
7665 .unwrap_or_default(),
7666 sha: worktree.sha.to_string(),
7667 is_main: worktree.is_main,
7668 is_bare: worktree.is_bare,
7669 }
7670}
7671
7672fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7673 git::repository::Worktree {
7674 path: PathBuf::from(proto.path.clone()),
7675 ref_name: if proto.ref_name.is_empty() {
7676 None
7677 } else {
7678 Some(SharedString::from(&proto.ref_name))
7679 },
7680 sha: proto.sha.clone().into(),
7681 is_main: proto.is_main,
7682 is_bare: proto.is_bare,
7683 }
7684}
7685
7686fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7687 git::repository::Branch {
7688 is_head: proto.is_head,
7689 ref_name: proto.ref_name.clone().into(),
7690 upstream: proto
7691 .upstream
7692 .as_ref()
7693 .map(|upstream| git::repository::Upstream {
7694 ref_name: upstream.ref_name.to_string().into(),
7695 tracking: upstream
7696 .tracking
7697 .as_ref()
7698 .map(|tracking| {
7699 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7700 ahead: tracking.ahead as u32,
7701 behind: tracking.behind as u32,
7702 })
7703 })
7704 .unwrap_or(git::repository::UpstreamTracking::Gone),
7705 }),
7706 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7707 git::repository::CommitSummary {
7708 sha: commit.sha.to_string().into(),
7709 subject: commit.subject.to_string().into(),
7710 commit_timestamp: commit.commit_timestamp,
7711 author_name: commit.author_name.to_string().into(),
7712 has_parent: true,
7713 }
7714 }),
7715 }
7716}
7717
7718fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
7719 proto::GitCommitDetails {
7720 sha: commit.sha.to_string(),
7721 message: commit.message.to_string(),
7722 commit_timestamp: commit.commit_timestamp,
7723 author_email: commit.author_email.to_string(),
7724 author_name: commit.author_name.to_string(),
7725 }
7726}
7727
7728fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
7729 CommitDetails {
7730 sha: proto.sha.clone().into(),
7731 message: proto.message.clone().into(),
7732 commit_timestamp: proto.commit_timestamp,
7733 author_email: proto.author_email.clone().into(),
7734 author_name: proto.author_name.clone().into(),
7735 }
7736}
7737
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
///
/// Note that `scan_id` is bumped twice: once when the cheap branch/head
/// fields are published, and again when the status/merge/stash fields land.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    // Capture the fields we need up front, and clear the pending-status set
    // since this pass recomputes every file status below.
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    // Resolving the head commit takes two backend calls: read HEAD's sha,
    // then load its details. A failing `show` is logged and treated as no
    // head commit.
    let head_commit_future = {
        let backend = backend.clone();
        async move {
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Run the branch, head-commit, and worktree queries concurrently on the
    // background executor.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    // The current branch is whichever one is marked as HEAD, if any.
    let branch = branches.iter().find(|branch| branch.is_head).cloned();
    let branch_list: Arc<[Branch]> = branches.into();

    // Exclude the repository's own working directory; only linked worktrees
    // remain.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    // Look up the "origin" and "upstream" remote URLs concurrently.
    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // First publish: the cheap-to-compute fields. Change detection is done
    // against the entity's current snapshot so we only emit events that
    // correspond to real changes.
    let snapshot = this.update(cx, |this, cx| {
        let head_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let branch_list_changed = *branch_list != *this.snapshot.branch_list;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            branch_list: branch_list.clone(),
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if head_changed {
            cx.emit(RepositoryEvent::HeadChanged);
        }

        if branch_list_changed {
            cx.emit(RepositoryEvent::BranchListChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Now the expensive part: a full status scan from the repo root, diff
    // stats (skipped when there is no head commit to diff against), and the
    // stash entries, all joined on the background executor.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    // Index the diff stats by path for constant-time lookup while building
    // the status tree.
    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    // Build the ordered status tree, collecting conflicted paths as we go.
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    // Update the merge details with the freshly collected conflicted paths;
    // `conflicts_changed` reports whether the conflict set differs.
    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Second publish: statuses, merge state, and stash, with another
    // `scan_id` bump.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7907
7908fn status_from_proto(
7909 simple_status: i32,
7910 status: Option<proto::GitFileStatus>,
7911) -> anyhow::Result<FileStatus> {
7912 use proto::git_file_status::Variant;
7913
7914 let Some(variant) = status.and_then(|status| status.variant) else {
7915 let code = proto::GitStatus::from_i32(simple_status)
7916 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
7917 let result = match code {
7918 proto::GitStatus::Added => TrackedStatus {
7919 worktree_status: StatusCode::Added,
7920 index_status: StatusCode::Unmodified,
7921 }
7922 .into(),
7923 proto::GitStatus::Modified => TrackedStatus {
7924 worktree_status: StatusCode::Modified,
7925 index_status: StatusCode::Unmodified,
7926 }
7927 .into(),
7928 proto::GitStatus::Conflict => UnmergedStatus {
7929 first_head: UnmergedStatusCode::Updated,
7930 second_head: UnmergedStatusCode::Updated,
7931 }
7932 .into(),
7933 proto::GitStatus::Deleted => TrackedStatus {
7934 worktree_status: StatusCode::Deleted,
7935 index_status: StatusCode::Unmodified,
7936 }
7937 .into(),
7938 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
7939 };
7940 return Ok(result);
7941 };
7942
7943 let result = match variant {
7944 Variant::Untracked(_) => FileStatus::Untracked,
7945 Variant::Ignored(_) => FileStatus::Ignored,
7946 Variant::Unmerged(unmerged) => {
7947 let [first_head, second_head] =
7948 [unmerged.first_head, unmerged.second_head].map(|head| {
7949 let code = proto::GitStatus::from_i32(head)
7950 .with_context(|| format!("Invalid git status code: {head}"))?;
7951 let result = match code {
7952 proto::GitStatus::Added => UnmergedStatusCode::Added,
7953 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
7954 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
7955 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
7956 };
7957 Ok(result)
7958 });
7959 let [first_head, second_head] = [first_head?, second_head?];
7960 UnmergedStatus {
7961 first_head,
7962 second_head,
7963 }
7964 .into()
7965 }
7966 Variant::Tracked(tracked) => {
7967 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
7968 .map(|status| {
7969 let code = proto::GitStatus::from_i32(status)
7970 .with_context(|| format!("Invalid git status code: {status}"))?;
7971 let result = match code {
7972 proto::GitStatus::Modified => StatusCode::Modified,
7973 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
7974 proto::GitStatus::Added => StatusCode::Added,
7975 proto::GitStatus::Deleted => StatusCode::Deleted,
7976 proto::GitStatus::Renamed => StatusCode::Renamed,
7977 proto::GitStatus::Copied => StatusCode::Copied,
7978 proto::GitStatus::Unmodified => StatusCode::Unmodified,
7979 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
7980 };
7981 Ok(result)
7982 });
7983 let [index_status, worktree_status] = [index_status?, worktree_status?];
7984 TrackedStatus {
7985 index_status,
7986 worktree_status,
7987 }
7988 .into()
7989 }
7990 };
7991 Ok(result)
7992}
7993
7994fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7995 use proto::git_file_status::{Tracked, Unmerged, Variant};
7996
7997 let variant = match status {
7998 FileStatus::Untracked => Variant::Untracked(Default::default()),
7999 FileStatus::Ignored => Variant::Ignored(Default::default()),
8000 FileStatus::Unmerged(UnmergedStatus {
8001 first_head,
8002 second_head,
8003 }) => Variant::Unmerged(Unmerged {
8004 first_head: unmerged_status_to_proto(first_head),
8005 second_head: unmerged_status_to_proto(second_head),
8006 }),
8007 FileStatus::Tracked(TrackedStatus {
8008 index_status,
8009 worktree_status,
8010 }) => Variant::Tracked(Tracked {
8011 index_status: tracked_status_to_proto(index_status),
8012 worktree_status: tracked_status_to_proto(worktree_status),
8013 }),
8014 };
8015 proto::GitFileStatus {
8016 variant: Some(variant),
8017 }
8018}
8019
8020fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
8021 match code {
8022 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
8023 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
8024 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
8025 }
8026}
8027
8028fn tracked_status_to_proto(code: StatusCode) -> i32 {
8029 match code {
8030 StatusCode::Added => proto::GitStatus::Added as _,
8031 StatusCode::Deleted => proto::GitStatus::Deleted as _,
8032 StatusCode::Modified => proto::GitStatus::Modified as _,
8033 StatusCode::Renamed => proto::GitStatus::Renamed as _,
8034 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
8035 StatusCode::Copied => proto::GitStatus::Copied as _,
8036 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
8037 }
8038}