1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, BoxFuture, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
43 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store tracking the git repositories backing a project's worktrees,
/// plus per-buffer diff/conflict state and collaboration sharing state.
pub struct GitStore {
    /// Local vs. remote backing state (see `GitStoreState`).
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All known repositories, keyed by store-assigned id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// Worktrees that contribute paths to each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository currently considered "active", if any.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff-loading tasks, shared so concurrent requests for the
    /// same (buffer, kind) pair await a single computation.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diffs that have been shared with remote collaborators, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// Which diffs for a single buffer have been shared with a particular peer.
#[derive(Default)]
struct SharedDiffs {
    // Diff of the buffer against the index (staged text), if shared.
    unstaged: Option<Entity<BufferDiff>>,
    // Diff of the buffer against HEAD (committed text), if shared.
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: diffs against the index, HEAD, or arbitrary commits,
/// the buffer's conflict-marker set, and the cached base texts those diffs
/// are computed from.
struct BufferGitState {
    // Weak handles: the store does not keep diffs alive; callers do.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // Diffs against a specific commit; a `None` key is a diff with no base text.
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    // In-flight background work; replaced wholesale when superseded.
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders presumably resolved when the next conflict reparse completes —
    // the completion site is outside this excerpt.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    // Cached base texts for `oid_diffs`, keyed by commit.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or HEAD) changed for a buffer.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Both changed, to distinct values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Both changed, to the same value.
    SetBoth(Option<String>),
}
155
/// Which base a buffer diff is computed against.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer vs. the index (staged text).
    Unstaged,
    /// Buffer vs. HEAD (committed text).
    Uncommitted,
    /// Buffer vs. the blob at a given commit; `None` means no base text.
    SinceOid(Option<git::Oid>),
}
162
/// Backing state distinguishing a store that owns local repositories from one
/// mirroring a remote (collaborative) project.
enum GitStoreState {
    Local {
        /// Source of fresh ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is being shared downstream.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        /// Connection to the host that owns the real repositories.
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while re-sharing this remote project further downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
176
/// Messages queued for the background task that syncs repository state to a
/// downstream client (see `GitStore::shared`).
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
181
/// Live connection to a downstream client of a locally-hosted project.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    /// Queue feeding the background sync task below.
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    /// Forwards queued updates to `client`; dropping this stops the sync.
    _task: Task<Result<()>>,
}
188
/// A point-in-time checkpoint of every repository in the store, keyed by each
/// repository's working-directory path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
193
/// A single path's git status, plus optional added/deleted line counts.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    pub diff_stat: Option<DiffStat>,
}
200
201impl StatusEntry {
202 fn to_proto(&self) -> proto::StatusEntry {
203 let simple_status = match self.status {
204 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
205 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
206 FileStatus::Tracked(TrackedStatus {
207 index_status,
208 worktree_status,
209 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
210 worktree_status
211 } else {
212 index_status
213 }),
214 };
215
216 proto::StatusEntry {
217 repo_path: self.repo_path.to_proto(),
218 simple_status,
219 status: Some(status_to_proto(self.status)),
220 diff_stat_added: self.diff_stat.map(|ds| ds.added),
221 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
222 }
223 }
224}
225
226impl TryFrom<proto::StatusEntry> for StatusEntry {
227 type Error = anyhow::Error;
228
229 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
230 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
231 let status = status_from_proto(value.simple_status, value.status)?;
232 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
233 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
234 _ => None,
235 };
236 Ok(Self {
237 repo_path,
238 status,
239 diff_stat,
240 })
241 }
242}
243
244impl sum_tree::Item for StatusEntry {
245 type Summary = PathSummary<GitSummary>;
246
247 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
248 PathSummary {
249 max_path: self.repo_path.as_ref().clone(),
250 item_summary: self.status.summary(),
251 }
252 }
253}
254
255impl sum_tree::KeyedItem for StatusEntry {
256 type Key = PathKey;
257
258 fn key(&self) -> Self::Key {
259 PathKey(self.repo_path.as_ref().clone())
260 }
261}
262
/// Identifier for a repository within a `GitStore`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
265
/// Details about an in-progress merge: which paths conflict (and with which
/// merge heads), plus the prepared merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}
271
/// Lazily-loaded per-commit data for the commit graph.
#[derive(Clone)]
pub enum CommitDataState {
    /// A fetch is in flight; data not yet available.
    Loading,
    Loaded(Arc<GraphCommitData>),
}
277
/// An immutable snapshot of a repository's state, safe to clone and hand to
/// background tasks or downstream clients.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Status entries ordered by repository-relative path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    /// The checked-out branch, if HEAD points at one.
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    // NOTE(review): presumably bumped on each status scan — the scanning
    // code is outside this excerpt; confirm before relying on it.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Linked git worktrees of this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}
298
/// Identifier for a queued/running git job.
type JobId = u64;

/// Metadata describing a running git job.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    /// When the job started.
    pub start: Instant,
    pub message: SharedString,
}
306
/// Background worker serving commit-data requests for the commit graph.
struct GraphCommitDataHandler {
    _task: Task<()>,
    /// Channel used to request data for a specific commit.
    commit_data_request: smol::channel::Sender<Oid>,
}
311
/// Lifecycle of the commit-data handler: not yet spawned, running, or shut down.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}
317
/// Accumulated commit-graph data for one (source, order) query, filled in by
/// `fetch_task` as results arrive.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index into `commit_data` for each commit OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}
324
/// Borrowed view of graph data handed to callers: the commits loaded so far
/// plus loading/error state.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
330
/// A single git repository tracked by the `GitStore`, combining the latest
/// snapshot with mutable job, askpass, and commit-graph bookkeeping.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue feeding this repository's serialized git jobs.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Jobs currently running, keyed by id.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    job_id: JobId,
    /// Askpass delegates for in-flight remote operations, keyed by id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Shared handle to the (local or remote) backend, resolved lazily.
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}
350
// Expose snapshot fields (statuses, branch, etc.) directly on `Repository`.
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
358
/// Everything needed to run git operations against a local repository.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    /// Shell environment of the working directory, used when invoking git.
    pub environment: Arc<HashMap<String, String>>,
}
365
impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`, first resolving the
    /// working directory's shell environment so git runs with the same
    /// `PATH` (and other variables) a user shell would have.
    ///
    /// Environment-lookup failure is logged and degrades to an empty
    /// environment rather than failing the open.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        // Prefer a `git` binary found via the directory's own PATH; fall back
        // to whatever is on the application's PATH.
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
408
/// Connection details for proxying git operations to a remote host.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}
414
/// Where a repository's git operations actually execute.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}
420
/// Progress notifications while loading commit-graph data.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of loaded commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}
427
/// Events emitted by a `Repository` when parts of its state change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    /// Commit-graph loading progress for a particular (source, order) query.
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
437
/// Marker event signaling that a repository's set of active jobs changed.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
440
/// Events emitted by the `GitStore` itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// A write to the git index failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
452
// Event wiring: repositories emit repository- and job-level events; the
// store emits store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
456
/// A unit of git work executed on a repository's job queue.
pub struct GitJob {
    /// The work itself, given the resolved backend state.
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): presumably used to coalesce equivalent queued jobs — the
    // queue-draining logic is outside this excerpt; confirm.
    key: Option<GitJobKey>,
}
461
/// Identifies classes of jobs that should not pile up in the queue.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
469
470impl GitStore {
471 pub fn local(
472 worktree_store: &Entity<WorktreeStore>,
473 buffer_store: Entity<BufferStore>,
474 environment: Entity<ProjectEnvironment>,
475 fs: Arc<dyn Fs>,
476 cx: &mut Context<Self>,
477 ) -> Self {
478 Self::new(
479 worktree_store.clone(),
480 buffer_store,
481 GitStoreState::Local {
482 next_repository_id: Arc::new(AtomicU64::new(1)),
483 downstream: None,
484 project_environment: environment,
485 fs,
486 },
487 cx,
488 )
489 }
490
491 pub fn remote(
492 worktree_store: &Entity<WorktreeStore>,
493 buffer_store: Entity<BufferStore>,
494 upstream_client: AnyProtoClient,
495 project_id: u64,
496 cx: &mut Context<Self>,
497 ) -> Self {
498 Self::new(
499 worktree_store.clone(),
500 buffer_store,
501 GitStoreState::Remote {
502 upstream_client,
503 upstream_project_id: project_id,
504 downstream: None,
505 },
506 cx,
507 )
508 }
509
510 fn new(
511 worktree_store: Entity<WorktreeStore>,
512 buffer_store: Entity<BufferStore>,
513 state: GitStoreState,
514 cx: &mut Context<Self>,
515 ) -> Self {
516 let mut _subscriptions = vec![
517 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
518 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
519 ];
520
521 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
522 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
523 }
524
525 GitStore {
526 state,
527 buffer_store,
528 worktree_store,
529 repositories: HashMap::default(),
530 worktree_ids: HashMap::default(),
531 active_repo_id: None,
532 _subscriptions,
533 loading_diffs: HashMap::default(),
534 shared_diffs: HashMap::default(),
535 diffs: HashMap::default(),
536 }
537 }
538
    /// Registers all git-related RPC message handlers on the given client.
    /// Called once at startup, before any `GitStore` receives messages.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
    }
590
591 pub fn is_local(&self) -> bool {
592 matches!(self.state, GitStoreState::Local { .. })
593 }
594 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
595 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
596 let id = repo.read(cx).id;
597 if self.active_repo_id != Some(id) {
598 self.active_repo_id = Some(id);
599 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
600 }
601 }
602 }
603
    /// Begins sharing this store's repositories with a downstream client.
    ///
    /// For a remote store, current snapshots are sent immediately and the
    /// client recorded for future updates. For a local store, a background
    /// task forwards queued `DownstreamUpdate`s, diffing each new snapshot
    /// against the last one sent so only changes go over the wire.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send a full initial update for every repository, split into
                // protocol-sized chunks.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // `snapshots` remembers the last snapshot sent per repository
                // so subsequent updates can be expressed as deltas.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the queue with the current state of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send only the delta.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send a full update.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The send loop ended (channel closed or a send
                        // failed): clear the downstream so we stop queueing.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
684
685 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
686 match &mut self.state {
687 GitStoreState::Local {
688 downstream: downstream_client,
689 ..
690 } => {
691 downstream_client.take();
692 }
693 GitStoreState::Remote {
694 downstream: downstream_client,
695 ..
696 } => {
697 downstream_client.take();
698 }
699 }
700 self.shared_diffs.clear();
701 }
702
    /// Drops all diffs that were shared with the given peer (e.g. after it
    /// disconnects).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
706
707 pub fn active_repository(&self) -> Option<Entity<Repository>> {
708 self.active_repo_id
709 .as_ref()
710 .map(|id| self.repositories[id].clone())
711 }
712
    /// Returns (opening if necessary) the diff of `buffer` against the git
    /// index (staged text).
    ///
    /// A live diff is reused — after waiting for any in-progress
    /// recalculation. Otherwise a load is started, or joined if one is
    /// already in flight for this buffer.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // Hand the existing diff back only once any pending
            // recalculation settles.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Share a single loading task per (buffer, kind) so concurrent
        // callers await the same computation.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
767
    /// Returns (opening if necessary) a diff of `buffer` against the blob it
    /// had at commit `oid`; `None` means a diff with no base text.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Reuse a live diff if one exists, waiting out any in-progress
        // recalculation first.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Join an in-flight load for the same (buffer, oid), if any.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository; absent when
                    // diffing against "no commit".
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff is attached as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        // Record the new diff (and its base text) so later
                        // calls can reuse both.
                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
868
    /// Returns (opening if necessary) the diff of `buffer` against its
    /// committed (HEAD) text. Mirrors `open_unstaged_diff`, but loads the
    /// committed text and uses kind `Uncommitted`.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // Hand the existing diff back only once any pending
            // recalculation settles.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Share a single loading task per (buffer, kind) so concurrent
        // callers await the same computation.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
921
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`:
    /// installs the loaded base texts into the buffer's git state, creates
    /// the `BufferDiff` entity, and waits for the first recalculation.
    ///
    /// On error, the pending entry in `loading_diffs` is cleared so a later
    /// call can retry.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Loading the base text failed: drop the shared loading task
                // so the next request retries, then propagate the error.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none is live yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off the first recalculation and return a future that
                // resolves when it completes.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
996
997 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
998 let diff_state = self.diffs.get(&buffer_id)?;
999 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1000 }
1001
1002 pub fn get_uncommitted_diff(
1003 &self,
1004 buffer_id: BufferId,
1005 cx: &App,
1006 ) -> Option<Entity<BufferDiff>> {
1007 let diff_state = self.diffs.get(&buffer_id)?;
1008 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1009 }
1010
1011 pub fn get_diff_since_oid(
1012 &self,
1013 buffer_id: BufferId,
1014 oid: Option<git::Oid>,
1015 cx: &App,
1016 ) -> Option<Entity<BufferDiff>> {
1017 let diff_state = self.diffs.get(&buffer_id)?;
1018 diff_state.read(cx).oid_diff(oid)
1019 }
1020
    /// Returns (creating if necessary) the conflict-marker set for `buffer`,
    /// triggering a reparse of its conflict markers in either case.
    pub fn open_conflict_set(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Entity<ConflictSet> {
        log::debug!("open conflict set");
        let buffer_id = buffer.read(cx).remote_id();

        // Reuse a live conflict set, refreshing it against the current
        // buffer text.
        if let Some(git_state) = self.diffs.get(&buffer_id)
            && let Some(conflict_set) = git_state
                .read(cx)
                .conflict_set
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            let conflict_set = conflict_set;
            let buffer_snapshot = buffer.read(cx).text_snapshot();

            git_state.update(cx, |state, cx| {
                let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
            });

            return conflict_set;
        }

        // Seed the new set with whether the repository currently reports
        // this path as unmerged.
        let is_unmerged = self
            .repository_and_path_for_buffer_id(buffer_id, cx)
            .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
        let git_store = cx.weak_entity();
        let buffer_git_state = self
            .diffs
            .entry(buffer_id)
            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
        let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));

        // Re-emit conflict-set changes as store-level events.
        self._subscriptions
            .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
                cx.emit(GitStoreEvent::ConflictsUpdated);
            }));

        buffer_git_state.update(cx, |state, cx| {
            state.conflict_set = Some(conflict_set.downgrade());
            let buffer_snapshot = buffer.read(cx).text_snapshot();
            let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
        });

        conflict_set
    }
1069
1070 pub fn project_path_git_status(
1071 &self,
1072 project_path: &ProjectPath,
1073 cx: &App,
1074 ) -> Option<FileStatus> {
1075 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1076 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1077 }
1078
    /// Captures a checkpoint of every tracked repository, keyed by the
    /// repository's working-directory absolute path.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // Flatten the nested result produced by `checkpoint()` so
                // each pushed future yields a single `Result`.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        // Await all repository checkpoints off the main thread, failing if
        // any single repository fails to produce one.
        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1099
    /// Restores every repository to the state recorded in `checkpoint`.
    ///
    /// Checkpoint entries are matched to repositories by working-directory
    /// path; entries whose repository is no longer tracked are silently
    /// skipped.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index the live repositories by working directory so checkpoint
        // entries can be matched back to them.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // Flatten the nested result when awaiting.
                tasks.push(async move { restore.await? });
            }
        }
        // Run all restores concurrently; any failure aborts the whole task.
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1125
1126 /// Compares two checkpoints, returning true if they are equal.
1127 pub fn compare_checkpoints(
1128 &self,
1129 left: GitStoreCheckpoint,
1130 mut right: GitStoreCheckpoint,
1131 cx: &mut App,
1132 ) -> Task<Result<bool>> {
1133 let repositories_by_work_dir_abs_path = self
1134 .repositories
1135 .values()
1136 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1137 .collect::<HashMap<_, _>>();
1138
1139 let mut tasks = Vec::new();
1140 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1141 if let Some(right_checkpoint) = right
1142 .checkpoints_by_work_dir_abs_path
1143 .remove(&work_dir_abs_path)
1144 {
1145 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1146 {
1147 let compare = repository.update(cx, |repository, _| {
1148 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1149 });
1150
1151 tasks.push(async move { compare.await? });
1152 }
1153 } else {
1154 return Task::ready(Ok(false));
1155 }
1156 }
1157 cx.background_spawn(async move {
1158 Ok(future::try_join_all(tasks)
1159 .await?
1160 .into_iter()
1161 .all(|result| result))
1162 })
1163 }
1164
    /// Blames a buffer.
    ///
    /// When `version` is `Some`, the blame is computed against the buffer's
    /// text at that version; otherwise against the current text. Local
    /// repositories run the blame through the git backend; remote ones
    /// forward a `BlameBuffer` request upstream.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot everything we need from the buffer before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle so the blame task doesn't keep the
        // repository alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1211
1212 pub fn file_history(
1213 &self,
1214 repo: &Entity<Repository>,
1215 path: RepoPath,
1216 cx: &mut App,
1217 ) -> Task<Result<git::repository::FileHistory>> {
1218 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1219
1220 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1221 }
1222
1223 pub fn file_history_paginated(
1224 &self,
1225 repo: &Entity<Repository>,
1226 path: RepoPath,
1227 skip: usize,
1228 limit: Option<usize>,
1229 cx: &mut App,
1230 ) -> Task<Result<git::repository::FileHistory>> {
1231 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1232
1233 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1234 }
1235
1236 pub fn get_permalink_to_line(
1237 &self,
1238 buffer: &Entity<Buffer>,
1239 selection: Range<u32>,
1240 cx: &mut App,
1241 ) -> Task<Result<url::Url>> {
1242 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1243 return Task::ready(Err(anyhow!("buffer has no file")));
1244 };
1245
1246 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1247 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1248 cx,
1249 ) else {
1250 // If we're not in a Git repo, check whether this is a Rust source
1251 // file in the Cargo registry (presumably opened with go-to-definition
1252 // from a normal Rust file). If so, we can put together a permalink
1253 // using crate metadata.
1254 if buffer
1255 .read(cx)
1256 .language()
1257 .is_none_or(|lang| lang.name() != "Rust")
1258 {
1259 return Task::ready(Err(anyhow!("no permalink available")));
1260 }
1261 let file_path = file.worktree.read(cx).absolutize(&file.path);
1262 return cx.spawn(async move |cx| {
1263 let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
1264 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1265 .context("no permalink available")
1266 });
1267 };
1268
1269 let buffer_id = buffer.read(cx).remote_id();
1270 let branch = repo.read(cx).branch.clone();
1271 let remote = branch
1272 .as_ref()
1273 .and_then(|b| b.upstream.as_ref())
1274 .and_then(|b| b.remote_name())
1275 .unwrap_or("origin")
1276 .to_string();
1277
1278 let rx = repo.update(cx, |repo, _| {
1279 repo.send_job(None, move |state, cx| async move {
1280 match state {
1281 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1282 let origin_url = backend
1283 .remote_url(&remote)
1284 .await
1285 .with_context(|| format!("remote \"{remote}\" not found"))?;
1286
1287 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1288
1289 let provider_registry =
1290 cx.update(GitHostingProviderRegistry::default_global);
1291
1292 let (provider, remote) =
1293 parse_git_remote_url(provider_registry, &origin_url)
1294 .context("parsing Git remote URL")?;
1295
1296 Ok(provider.build_permalink(
1297 remote,
1298 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1299 ))
1300 }
1301 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1302 let response = client
1303 .request(proto::GetPermalinkToLine {
1304 project_id: project_id.to_proto(),
1305 buffer_id: buffer_id.into(),
1306 selection: Some(proto::Range {
1307 start: selection.start as u64,
1308 end: selection.end as u64,
1309 }),
1310 })
1311 .await?;
1312
1313 url::Url::parse(&response.permalink).context("failed to parse permalink")
1314 }
1315 }
1316 })
1317 });
1318 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1319 }
1320
1321 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1322 match &self.state {
1323 GitStoreState::Local {
1324 downstream: downstream_client,
1325 ..
1326 } => downstream_client
1327 .as_ref()
1328 .map(|state| (state.client.clone(), state.project_id)),
1329 GitStoreState::Remote {
1330 downstream: downstream_client,
1331 ..
1332 } => downstream_client.clone(),
1333 }
1334 }
1335
1336 fn upstream_client(&self) -> Option<AnyProtoClient> {
1337 match &self.state {
1338 GitStoreState::Local { .. } => None,
1339 GitStoreState::Remote {
1340 upstream_client, ..
1341 } => Some(upstream_client.clone()),
1342 }
1343 }
1344
    /// Reacts to worktree changes in a local project: forwards changed paths
    /// to the affected repositories, adds/updates repositories discovered in
    /// worktrees, and drops repositories whose last worktree was removed.
    /// No-op for remote projects.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed entries by the repository they belong
                    // to, then notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            // NOTE(review): the `Result` from `update` is
                            // discarded here — a released repository entity is
                            // silently ignored; confirm that's intended.
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees (e.g. single-file ones) don't
                // contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Detach the removed worktree from every repository; collect
                // the repositories left with no worktree at all.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                // Drop orphaned repositories and tell downstream
                // collaborators about each removal.
                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was removed, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Handles an event from one repository: refreshes the conflict state of
    /// every buffer belonging to that repository, then re-emits the event at
    /// the store level (flagging whether it came from the active repository).
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only buffers that resolve to this same repository are affected.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse markers when the conflict flag actually
                        // flipped, to avoid needless work per event.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1490
    /// Forwards a repository's job-queue change notification to observers of
    /// the store as a single `GitStoreEvent::JobsUpdated` event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1494
    /// Update our list of repositories and schedule git scans in response to
    /// a notification from a worktree.
    ///
    /// Existing repositories are matched by working-directory path (old or
    /// new) and either rescanned or detached; unseen repositories are created
    /// and scanned; repositories with no remaining worktree are removed and
    /// the removal is forwarded downstream via `updates_tx`.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update to an existing repository by either its old or
            // new working-directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repository still exists (possibly moved): record
                    // the worktree association and schedule a rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // The repository vanished from this worktree; drop the
                    // association and mark the repository for removal once no
                    // worktree references it anymore.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A new repository: resolve the original (main) repo path in
                // case this is a linked git worktree.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Repositories in untrusted worktrees get a restricted
                // backend until the user grants trust.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository we learn about becomes active.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        // Finalize removals, clearing the active repository and notifying
        // downstream collaborators as needed.
        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1607
    /// Propagates worktree trust changes to the git backends of the affected
    /// repositories. Only meaningful for local projects.
    fn on_trusted_worktrees_event(
        &mut self,
        _: Entity<TrustedWorktreesStore>,
        event: &TrustedWorktreesEvent,
        cx: &mut Context<Self>,
    ) {
        if !matches!(self.state, GitStoreState::Local { .. }) {
            return;
        }

        let (is_trusted, event_paths) = match event {
            TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
            TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
        };

        // Flip the trust flag on every repository associated with a worktree
        // mentioned in the event.
        for (repo_id, worktree_ids) in &self.worktree_ids {
            if worktree_ids
                .iter()
                .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
            {
                if let Some(repo) = self.repositories.get(repo_id) {
                    let repository_state = repo.read(cx).repository_state.clone();
                    // The backend is reached through an async handle, so the
                    // flag is applied off the main thread; remote repositories
                    // (and failed state resolutions) are skipped.
                    cx.background_spawn(async move {
                        if let Ok(RepositoryState::Local(state)) = repository_state.await {
                            state.backend.set_trusted(is_trusted);
                        }
                    })
                    .detach();
                }
            }
        }
    }
1640
    /// Keeps per-buffer git state in sync with buffer lifecycle changes:
    /// additions, drops, file-path changes, and shared-buffer closure.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Notify the buffer's git state whenever its language changes.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // A collaborator closed the buffer: drop only their shared
                // diff entry.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // The buffer is gone entirely: drop its diff state and every
                // peer's shared diff for it.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Reload the committed text for the new path, then feed
                    // the new diff bases into the buffer's diff state.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1713
    /// Kicks off diff recalculation and conflict-marker reparsing for each of
    /// the given buffers that has git state, returning a future that resolves
    /// once all of that work has completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // Only wait when a recalculation was actually scheduled.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        // Await everything; the returned future owns the boxed sub-futures
        // and so doesn't borrow `self`.
        async move {
            futures::future::join_all(futures).await;
        }
    }
1739
    /// When hunks are staged or unstaged in a diff, writes the new index text
    /// to the repository; on failure, clears the diff's pending hunks and
    /// emits an `IndexWriteError`.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the per-buffer staging-operation counter and pass it
                // along so the write job is tied to this operation.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Only the inner `Err` signals a failed index write;
                        // a dropped channel is ignored.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1779
1780 fn local_worktree_git_repos_changed(
1781 &mut self,
1782 worktree: Entity<Worktree>,
1783 changed_repos: &UpdatedGitRepositoriesSet,
1784 cx: &mut Context<Self>,
1785 ) {
1786 log::debug!("local worktree repos changed");
1787 debug_assert!(worktree.read(cx).is_local());
1788
1789 for repository in self.repositories.values() {
1790 repository.update(cx, |repository, cx| {
1791 let repo_abs_path = &repository.work_directory_abs_path;
1792 if changed_repos.iter().any(|update| {
1793 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1794 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1795 }) {
1796 repository.reload_buffer_diff_bases(cx);
1797 }
1798 });
1799 }
1800 }
1801
    /// All repositories currently tracked by this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1805
1806 /// Returns the original (main) repository working directory for the given worktree.
1807 /// For normal checkouts this equals the worktree's own path; for linked
1808 /// worktrees it points back to the original repo.
1809 pub fn original_repo_path_for_worktree(
1810 &self,
1811 worktree_id: WorktreeId,
1812 cx: &App,
1813 ) -> Option<Arc<Path>> {
1814 self.active_repo_id
1815 .iter()
1816 .chain(self.worktree_ids.keys())
1817 .find(|repo_id| {
1818 self.worktree_ids
1819 .get(repo_id)
1820 .is_some_and(|ids| ids.contains(&worktree_id))
1821 })
1822 .and_then(|repo_id| self.repositories.get(repo_id))
1823 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1824 }
1825
1826 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1827 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1828 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1829 Some(status.status)
1830 }
1831
1832 pub fn repository_and_path_for_buffer_id(
1833 &self,
1834 buffer_id: BufferId,
1835 cx: &App,
1836 ) -> Option<(Entity<Repository>, RepoPath)> {
1837 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1838 let project_path = buffer.read(cx).project_path(cx)?;
1839 self.repository_and_path_for_project_path(&project_path, cx)
1840 }
1841
    /// Resolves `path` to the repository that contains it, along with the
    /// path relative to that repository's working directory.
    ///
    /// When repositories are nested, the candidate with the greatest
    /// working-directory path wins; since a nested repository's path extends
    /// its parent's, this selects the innermost containing repository.
    pub fn repository_and_path_for_project_path(
        &self,
        path: &ProjectPath,
        cx: &App,
    ) -> Option<(Entity<Repository>, RepoPath)> {
        let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
        self.repositories
            .values()
            .filter_map(|repo| {
                let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
                Some((repo.clone(), repo_path))
            })
            .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
    }
1856
1857 pub fn git_init(
1858 &self,
1859 path: Arc<Path>,
1860 fallback_branch_name: String,
1861 cx: &App,
1862 ) -> Task<Result<()>> {
1863 match &self.state {
1864 GitStoreState::Local { fs, .. } => {
1865 let fs = fs.clone();
1866 cx.background_executor()
1867 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1868 }
1869 GitStoreState::Remote {
1870 upstream_client,
1871 upstream_project_id: project_id,
1872 ..
1873 } => {
1874 let client = upstream_client.clone();
1875 let project_id = *project_id;
1876 cx.background_executor().spawn(async move {
1877 client
1878 .request(proto::GitInit {
1879 project_id: project_id,
1880 abs_path: path.to_string_lossy().into_owned(),
1881 fallback_branch_name,
1882 })
1883 .await?;
1884 Ok(())
1885 })
1886 }
1887 }
1888 }
1889
1890 pub fn git_clone(
1891 &self,
1892 repo: String,
1893 path: impl Into<Arc<std::path::Path>>,
1894 cx: &App,
1895 ) -> Task<Result<()>> {
1896 let path = path.into();
1897 match &self.state {
1898 GitStoreState::Local { fs, .. } => {
1899 let fs = fs.clone();
1900 cx.background_executor()
1901 .spawn(async move { fs.git_clone(&repo, &path).await })
1902 }
1903 GitStoreState::Remote {
1904 upstream_client,
1905 upstream_project_id,
1906 ..
1907 } => {
1908 if upstream_client.is_via_collab() {
1909 return Task::ready(Err(anyhow!(
1910 "Git Clone isn't supported for project guests"
1911 )));
1912 }
1913 let request = upstream_client.request(proto::GitClone {
1914 project_id: *upstream_project_id,
1915 abs_path: path.to_string_lossy().into_owned(),
1916 remote_repo: repo,
1917 });
1918
1919 cx.background_spawn(async move {
1920 let result = request.await?;
1921
1922 match result.success {
1923 true => Ok(()),
1924 false => Err(anyhow!("Git Clone failed")),
1925 }
1926 })
1927 }
1928 }
1929 }
1930
1931 async fn handle_update_repository(
1932 this: Entity<Self>,
1933 envelope: TypedEnvelope<proto::UpdateRepository>,
1934 mut cx: AsyncApp,
1935 ) -> Result<()> {
1936 this.update(&mut cx, |this, cx| {
1937 let path_style = this.worktree_store.read(cx).path_style();
1938 let mut update = envelope.payload;
1939
1940 let id = RepositoryId::from_proto(update.id);
1941 let client = this.upstream_client().context("no upstream client")?;
1942
1943 let original_repo_abs_path: Option<Arc<Path>> = update
1944 .original_repo_abs_path
1945 .as_deref()
1946 .map(|p| Path::new(p).into());
1947
1948 let mut repo_subscription = None;
1949 let repo = this.repositories.entry(id).or_insert_with(|| {
1950 let git_store = cx.weak_entity();
1951 let repo = cx.new(|cx| {
1952 Repository::remote(
1953 id,
1954 Path::new(&update.abs_path).into(),
1955 original_repo_abs_path.clone(),
1956 path_style,
1957 ProjectId(update.project_id),
1958 client,
1959 git_store,
1960 cx,
1961 )
1962 });
1963 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1964 cx.emit(GitStoreEvent::RepositoryAdded);
1965 repo
1966 });
1967 this._subscriptions.extend(repo_subscription);
1968
1969 repo.update(cx, {
1970 let update = update.clone();
1971 |repo, cx| repo.apply_remote_update(update, cx)
1972 })?;
1973
1974 this.active_repo_id.get_or_insert_with(|| {
1975 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1976 id
1977 });
1978
1979 if let Some((client, project_id)) = this.downstream_client() {
1980 update.project_id = project_id.to_proto();
1981 client.send(update).log_err();
1982 }
1983 Ok(())
1984 })
1985 }
1986
1987 async fn handle_remove_repository(
1988 this: Entity<Self>,
1989 envelope: TypedEnvelope<proto::RemoveRepository>,
1990 mut cx: AsyncApp,
1991 ) -> Result<()> {
1992 this.update(&mut cx, |this, cx| {
1993 let mut update = envelope.payload;
1994 let id = RepositoryId::from_proto(update.id);
1995 this.repositories.remove(&id);
1996 if let Some((client, project_id)) = this.downstream_client() {
1997 update.project_id = project_id.to_proto();
1998 client.send(update).log_err();
1999 }
2000 if this.active_repo_id == Some(id) {
2001 this.active_repo_id = None;
2002 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
2003 }
2004 cx.emit(GitStoreEvent::RepositoryRemoved(id));
2005 });
2006 Ok(())
2007 }
2008
2009 async fn handle_git_init(
2010 this: Entity<Self>,
2011 envelope: TypedEnvelope<proto::GitInit>,
2012 cx: AsyncApp,
2013 ) -> Result<proto::Ack> {
2014 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2015 let name = envelope.payload.fallback_branch_name;
2016 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2017 .await?;
2018
2019 Ok(proto::Ack {})
2020 }
2021
2022 async fn handle_git_clone(
2023 this: Entity<Self>,
2024 envelope: TypedEnvelope<proto::GitClone>,
2025 cx: AsyncApp,
2026 ) -> Result<proto::GitCloneResponse> {
2027 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2028 let repo_name = envelope.payload.remote_repo;
2029 let result = cx
2030 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2031 .await;
2032
2033 Ok(proto::GitCloneResponse {
2034 success: result.is_ok(),
2035 })
2036 }
2037
2038 async fn handle_fetch(
2039 this: Entity<Self>,
2040 envelope: TypedEnvelope<proto::Fetch>,
2041 mut cx: AsyncApp,
2042 ) -> Result<proto::RemoteMessageResponse> {
2043 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2044 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2045 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2046 let askpass_id = envelope.payload.askpass_id;
2047
2048 let askpass = make_remote_delegate(
2049 this,
2050 envelope.payload.project_id,
2051 repository_id,
2052 askpass_id,
2053 &mut cx,
2054 );
2055
2056 let remote_output = repository_handle
2057 .update(&mut cx, |repository_handle, cx| {
2058 repository_handle.fetch(fetch_options, askpass, cx)
2059 })
2060 .await??;
2061
2062 Ok(proto::RemoteMessageResponse {
2063 stdout: remote_output.stdout,
2064 stderr: remote_output.stderr,
2065 })
2066 }
2067
2068 async fn handle_push(
2069 this: Entity<Self>,
2070 envelope: TypedEnvelope<proto::Push>,
2071 mut cx: AsyncApp,
2072 ) -> Result<proto::RemoteMessageResponse> {
2073 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2074 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2075
2076 let askpass_id = envelope.payload.askpass_id;
2077 let askpass = make_remote_delegate(
2078 this,
2079 envelope.payload.project_id,
2080 repository_id,
2081 askpass_id,
2082 &mut cx,
2083 );
2084
2085 let options = envelope
2086 .payload
2087 .options
2088 .as_ref()
2089 .map(|_| match envelope.payload.options() {
2090 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
2091 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
2092 });
2093
2094 let branch_name = envelope.payload.branch_name.into();
2095 let remote_branch_name = envelope.payload.remote_branch_name.into();
2096 let remote_name = envelope.payload.remote_name.into();
2097
2098 let remote_output = repository_handle
2099 .update(&mut cx, |repository_handle, cx| {
2100 repository_handle.push(
2101 branch_name,
2102 remote_branch_name,
2103 remote_name,
2104 options,
2105 askpass,
2106 cx,
2107 )
2108 })
2109 .await??;
2110 Ok(proto::RemoteMessageResponse {
2111 stdout: remote_output.stdout,
2112 stderr: remote_output.stderr,
2113 })
2114 }
2115
2116 async fn handle_pull(
2117 this: Entity<Self>,
2118 envelope: TypedEnvelope<proto::Pull>,
2119 mut cx: AsyncApp,
2120 ) -> Result<proto::RemoteMessageResponse> {
2121 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2122 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2123 let askpass_id = envelope.payload.askpass_id;
2124 let askpass = make_remote_delegate(
2125 this,
2126 envelope.payload.project_id,
2127 repository_id,
2128 askpass_id,
2129 &mut cx,
2130 );
2131
2132 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2133 let remote_name = envelope.payload.remote_name.into();
2134 let rebase = envelope.payload.rebase;
2135
2136 let remote_message = repository_handle
2137 .update(&mut cx, |repository_handle, cx| {
2138 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2139 })
2140 .await??;
2141
2142 Ok(proto::RemoteMessageResponse {
2143 stdout: remote_message.stdout,
2144 stderr: remote_message.stderr,
2145 })
2146 }
2147
2148 async fn handle_stage(
2149 this: Entity<Self>,
2150 envelope: TypedEnvelope<proto::Stage>,
2151 mut cx: AsyncApp,
2152 ) -> Result<proto::Ack> {
2153 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2154 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2155
2156 let entries = envelope
2157 .payload
2158 .paths
2159 .into_iter()
2160 .map(|path| RepoPath::new(&path))
2161 .collect::<Result<Vec<_>>>()?;
2162
2163 repository_handle
2164 .update(&mut cx, |repository_handle, cx| {
2165 repository_handle.stage_entries(entries, cx)
2166 })
2167 .await?;
2168 Ok(proto::Ack {})
2169 }
2170
2171 async fn handle_unstage(
2172 this: Entity<Self>,
2173 envelope: TypedEnvelope<proto::Unstage>,
2174 mut cx: AsyncApp,
2175 ) -> Result<proto::Ack> {
2176 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2177 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2178
2179 let entries = envelope
2180 .payload
2181 .paths
2182 .into_iter()
2183 .map(|path| RepoPath::new(&path))
2184 .collect::<Result<Vec<_>>>()?;
2185
2186 repository_handle
2187 .update(&mut cx, |repository_handle, cx| {
2188 repository_handle.unstage_entries(entries, cx)
2189 })
2190 .await?;
2191
2192 Ok(proto::Ack {})
2193 }
2194
2195 async fn handle_stash(
2196 this: Entity<Self>,
2197 envelope: TypedEnvelope<proto::Stash>,
2198 mut cx: AsyncApp,
2199 ) -> Result<proto::Ack> {
2200 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2201 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2202
2203 let entries = envelope
2204 .payload
2205 .paths
2206 .into_iter()
2207 .map(|path| RepoPath::new(&path))
2208 .collect::<Result<Vec<_>>>()?;
2209
2210 repository_handle
2211 .update(&mut cx, |repository_handle, cx| {
2212 repository_handle.stash_entries(entries, cx)
2213 })
2214 .await?;
2215
2216 Ok(proto::Ack {})
2217 }
2218
2219 async fn handle_stash_pop(
2220 this: Entity<Self>,
2221 envelope: TypedEnvelope<proto::StashPop>,
2222 mut cx: AsyncApp,
2223 ) -> Result<proto::Ack> {
2224 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2225 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2226 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2227
2228 repository_handle
2229 .update(&mut cx, |repository_handle, cx| {
2230 repository_handle.stash_pop(stash_index, cx)
2231 })
2232 .await?;
2233
2234 Ok(proto::Ack {})
2235 }
2236
2237 async fn handle_stash_apply(
2238 this: Entity<Self>,
2239 envelope: TypedEnvelope<proto::StashApply>,
2240 mut cx: AsyncApp,
2241 ) -> Result<proto::Ack> {
2242 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2243 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2244 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2245
2246 repository_handle
2247 .update(&mut cx, |repository_handle, cx| {
2248 repository_handle.stash_apply(stash_index, cx)
2249 })
2250 .await?;
2251
2252 Ok(proto::Ack {})
2253 }
2254
2255 async fn handle_stash_drop(
2256 this: Entity<Self>,
2257 envelope: TypedEnvelope<proto::StashDrop>,
2258 mut cx: AsyncApp,
2259 ) -> Result<proto::Ack> {
2260 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2261 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2262 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2263
2264 repository_handle
2265 .update(&mut cx, |repository_handle, cx| {
2266 repository_handle.stash_drop(stash_index, cx)
2267 })
2268 .await??;
2269
2270 Ok(proto::Ack {})
2271 }
2272
2273 async fn handle_set_index_text(
2274 this: Entity<Self>,
2275 envelope: TypedEnvelope<proto::SetIndexText>,
2276 mut cx: AsyncApp,
2277 ) -> Result<proto::Ack> {
2278 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2279 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2280 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2281
2282 repository_handle
2283 .update(&mut cx, |repository_handle, cx| {
2284 repository_handle.spawn_set_index_text_job(
2285 repo_path,
2286 envelope.payload.text,
2287 None,
2288 cx,
2289 )
2290 })
2291 .await??;
2292 Ok(proto::Ack {})
2293 }
2294
2295 async fn handle_run_hook(
2296 this: Entity<Self>,
2297 envelope: TypedEnvelope<proto::RunGitHook>,
2298 mut cx: AsyncApp,
2299 ) -> Result<proto::Ack> {
2300 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2301 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2302 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2303 repository_handle
2304 .update(&mut cx, |repository_handle, cx| {
2305 repository_handle.run_hook(hook, cx)
2306 })
2307 .await??;
2308 Ok(proto::Ack {})
2309 }
2310
2311 async fn handle_commit(
2312 this: Entity<Self>,
2313 envelope: TypedEnvelope<proto::Commit>,
2314 mut cx: AsyncApp,
2315 ) -> Result<proto::Ack> {
2316 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2317 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2318 let askpass_id = envelope.payload.askpass_id;
2319
2320 let askpass = make_remote_delegate(
2321 this,
2322 envelope.payload.project_id,
2323 repository_id,
2324 askpass_id,
2325 &mut cx,
2326 );
2327
2328 let message = SharedString::from(envelope.payload.message);
2329 let name = envelope.payload.name.map(SharedString::from);
2330 let email = envelope.payload.email.map(SharedString::from);
2331 let options = envelope.payload.options.unwrap_or_default();
2332
2333 repository_handle
2334 .update(&mut cx, |repository_handle, cx| {
2335 repository_handle.commit(
2336 message,
2337 name.zip(email),
2338 CommitOptions {
2339 amend: options.amend,
2340 signoff: options.signoff,
2341 },
2342 askpass,
2343 cx,
2344 )
2345 })
2346 .await??;
2347 Ok(proto::Ack {})
2348 }
2349
2350 async fn handle_get_remotes(
2351 this: Entity<Self>,
2352 envelope: TypedEnvelope<proto::GetRemotes>,
2353 mut cx: AsyncApp,
2354 ) -> Result<proto::GetRemotesResponse> {
2355 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2356 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2357
2358 let branch_name = envelope.payload.branch_name;
2359 let is_push = envelope.payload.is_push;
2360
2361 let remotes = repository_handle
2362 .update(&mut cx, |repository_handle, _| {
2363 repository_handle.get_remotes(branch_name, is_push)
2364 })
2365 .await??;
2366
2367 Ok(proto::GetRemotesResponse {
2368 remotes: remotes
2369 .into_iter()
2370 .map(|remotes| proto::get_remotes_response::Remote {
2371 name: remotes.name.to_string(),
2372 })
2373 .collect::<Vec<_>>(),
2374 })
2375 }
2376
2377 async fn handle_get_worktrees(
2378 this: Entity<Self>,
2379 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2380 mut cx: AsyncApp,
2381 ) -> Result<proto::GitWorktreesResponse> {
2382 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2383 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2384
2385 let worktrees = repository_handle
2386 .update(&mut cx, |repository_handle, _| {
2387 repository_handle.worktrees()
2388 })
2389 .await??;
2390
2391 Ok(proto::GitWorktreesResponse {
2392 worktrees: worktrees
2393 .into_iter()
2394 .map(|worktree| worktree_to_proto(&worktree))
2395 .collect::<Vec<_>>(),
2396 })
2397 }
2398
2399 async fn handle_create_worktree(
2400 this: Entity<Self>,
2401 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2402 mut cx: AsyncApp,
2403 ) -> Result<proto::Ack> {
2404 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2405 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2406 let directory = PathBuf::from(envelope.payload.directory);
2407 let name = envelope.payload.name;
2408 let commit = envelope.payload.commit;
2409
2410 repository_handle
2411 .update(&mut cx, |repository_handle, _| {
2412 repository_handle.create_worktree(name, directory, commit)
2413 })
2414 .await??;
2415
2416 Ok(proto::Ack {})
2417 }
2418
2419 async fn handle_remove_worktree(
2420 this: Entity<Self>,
2421 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2422 mut cx: AsyncApp,
2423 ) -> Result<proto::Ack> {
2424 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2425 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2426 let path = PathBuf::from(envelope.payload.path);
2427 let force = envelope.payload.force;
2428
2429 repository_handle
2430 .update(&mut cx, |repository_handle, _| {
2431 repository_handle.remove_worktree(path, force)
2432 })
2433 .await??;
2434
2435 Ok(proto::Ack {})
2436 }
2437
2438 async fn handle_rename_worktree(
2439 this: Entity<Self>,
2440 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2441 mut cx: AsyncApp,
2442 ) -> Result<proto::Ack> {
2443 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2444 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2445 let old_path = PathBuf::from(envelope.payload.old_path);
2446 let new_path = PathBuf::from(envelope.payload.new_path);
2447
2448 repository_handle
2449 .update(&mut cx, |repository_handle, _| {
2450 repository_handle.rename_worktree(old_path, new_path)
2451 })
2452 .await??;
2453
2454 Ok(proto::Ack {})
2455 }
2456
2457 async fn handle_get_branches(
2458 this: Entity<Self>,
2459 envelope: TypedEnvelope<proto::GitGetBranches>,
2460 mut cx: AsyncApp,
2461 ) -> Result<proto::GitBranchesResponse> {
2462 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2463 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2464
2465 let branches = repository_handle
2466 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2467 .await??;
2468
2469 Ok(proto::GitBranchesResponse {
2470 branches: branches
2471 .into_iter()
2472 .map(|branch| branch_to_proto(&branch))
2473 .collect::<Vec<_>>(),
2474 })
2475 }
2476 async fn handle_get_default_branch(
2477 this: Entity<Self>,
2478 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2479 mut cx: AsyncApp,
2480 ) -> Result<proto::GetDefaultBranchResponse> {
2481 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2482 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2483
2484 let branch = repository_handle
2485 .update(&mut cx, |repository_handle, _| {
2486 repository_handle.default_branch(false)
2487 })
2488 .await??
2489 .map(Into::into);
2490
2491 Ok(proto::GetDefaultBranchResponse { branch })
2492 }
2493 async fn handle_create_branch(
2494 this: Entity<Self>,
2495 envelope: TypedEnvelope<proto::GitCreateBranch>,
2496 mut cx: AsyncApp,
2497 ) -> Result<proto::Ack> {
2498 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2499 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2500 let branch_name = envelope.payload.branch_name;
2501
2502 repository_handle
2503 .update(&mut cx, |repository_handle, _| {
2504 repository_handle.create_branch(branch_name, None)
2505 })
2506 .await??;
2507
2508 Ok(proto::Ack {})
2509 }
2510
2511 async fn handle_change_branch(
2512 this: Entity<Self>,
2513 envelope: TypedEnvelope<proto::GitChangeBranch>,
2514 mut cx: AsyncApp,
2515 ) -> Result<proto::Ack> {
2516 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2517 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2518 let branch_name = envelope.payload.branch_name;
2519
2520 repository_handle
2521 .update(&mut cx, |repository_handle, _| {
2522 repository_handle.change_branch(branch_name)
2523 })
2524 .await??;
2525
2526 Ok(proto::Ack {})
2527 }
2528
2529 async fn handle_rename_branch(
2530 this: Entity<Self>,
2531 envelope: TypedEnvelope<proto::GitRenameBranch>,
2532 mut cx: AsyncApp,
2533 ) -> Result<proto::Ack> {
2534 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2535 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2536 let branch = envelope.payload.branch;
2537 let new_name = envelope.payload.new_name;
2538
2539 repository_handle
2540 .update(&mut cx, |repository_handle, _| {
2541 repository_handle.rename_branch(branch, new_name)
2542 })
2543 .await??;
2544
2545 Ok(proto::Ack {})
2546 }
2547
2548 async fn handle_create_remote(
2549 this: Entity<Self>,
2550 envelope: TypedEnvelope<proto::GitCreateRemote>,
2551 mut cx: AsyncApp,
2552 ) -> Result<proto::Ack> {
2553 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2554 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2555 let remote_name = envelope.payload.remote_name;
2556 let remote_url = envelope.payload.remote_url;
2557
2558 repository_handle
2559 .update(&mut cx, |repository_handle, _| {
2560 repository_handle.create_remote(remote_name, remote_url)
2561 })
2562 .await??;
2563
2564 Ok(proto::Ack {})
2565 }
2566
2567 async fn handle_delete_branch(
2568 this: Entity<Self>,
2569 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2570 mut cx: AsyncApp,
2571 ) -> Result<proto::Ack> {
2572 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2573 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2574 let is_remote = envelope.payload.is_remote;
2575 let branch_name = envelope.payload.branch_name;
2576
2577 repository_handle
2578 .update(&mut cx, |repository_handle, _| {
2579 repository_handle.delete_branch(is_remote, branch_name)
2580 })
2581 .await??;
2582
2583 Ok(proto::Ack {})
2584 }
2585
2586 async fn handle_remove_remote(
2587 this: Entity<Self>,
2588 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2589 mut cx: AsyncApp,
2590 ) -> Result<proto::Ack> {
2591 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2592 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2593 let remote_name = envelope.payload.remote_name;
2594
2595 repository_handle
2596 .update(&mut cx, |repository_handle, _| {
2597 repository_handle.remove_remote(remote_name)
2598 })
2599 .await??;
2600
2601 Ok(proto::Ack {})
2602 }
2603
2604 async fn handle_show(
2605 this: Entity<Self>,
2606 envelope: TypedEnvelope<proto::GitShow>,
2607 mut cx: AsyncApp,
2608 ) -> Result<proto::GitCommitDetails> {
2609 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2610 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2611
2612 let commit = repository_handle
2613 .update(&mut cx, |repository_handle, _| {
2614 repository_handle.show(envelope.payload.commit)
2615 })
2616 .await??;
2617 Ok(proto::GitCommitDetails {
2618 sha: commit.sha.into(),
2619 message: commit.message.into(),
2620 commit_timestamp: commit.commit_timestamp,
2621 author_email: commit.author_email.into(),
2622 author_name: commit.author_name.into(),
2623 })
2624 }
2625
2626 async fn handle_create_checkpoint(
2627 this: Entity<Self>,
2628 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2629 mut cx: AsyncApp,
2630 ) -> Result<proto::GitCreateCheckpointResponse> {
2631 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2632 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2633
2634 let checkpoint = repository_handle
2635 .update(&mut cx, |repository, _| repository.checkpoint())
2636 .await??;
2637
2638 Ok(proto::GitCreateCheckpointResponse {
2639 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2640 })
2641 }
2642
2643 async fn handle_restore_checkpoint(
2644 this: Entity<Self>,
2645 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2646 mut cx: AsyncApp,
2647 ) -> Result<proto::Ack> {
2648 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2649 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2650
2651 let checkpoint = GitRepositoryCheckpoint {
2652 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2653 };
2654
2655 repository_handle
2656 .update(&mut cx, |repository, _| {
2657 repository.restore_checkpoint(checkpoint)
2658 })
2659 .await??;
2660
2661 Ok(proto::Ack {})
2662 }
2663
2664 async fn handle_compare_checkpoints(
2665 this: Entity<Self>,
2666 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2667 mut cx: AsyncApp,
2668 ) -> Result<proto::GitCompareCheckpointsResponse> {
2669 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2670 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2671
2672 let left = GitRepositoryCheckpoint {
2673 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2674 };
2675 let right = GitRepositoryCheckpoint {
2676 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2677 };
2678
2679 let equal = repository_handle
2680 .update(&mut cx, |repository, _| {
2681 repository.compare_checkpoints(left, right)
2682 })
2683 .await??;
2684
2685 Ok(proto::GitCompareCheckpointsResponse { equal })
2686 }
2687
2688 async fn handle_diff_checkpoints(
2689 this: Entity<Self>,
2690 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2691 mut cx: AsyncApp,
2692 ) -> Result<proto::GitDiffCheckpointsResponse> {
2693 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2694 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2695
2696 let base = GitRepositoryCheckpoint {
2697 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2698 };
2699 let target = GitRepositoryCheckpoint {
2700 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2701 };
2702
2703 let diff = repository_handle
2704 .update(&mut cx, |repository, _| {
2705 repository.diff_checkpoints(base, target)
2706 })
2707 .await??;
2708
2709 Ok(proto::GitDiffCheckpointsResponse { diff })
2710 }
2711
2712 async fn handle_load_commit_diff(
2713 this: Entity<Self>,
2714 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2715 mut cx: AsyncApp,
2716 ) -> Result<proto::LoadCommitDiffResponse> {
2717 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2718 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2719
2720 let commit_diff = repository_handle
2721 .update(&mut cx, |repository_handle, _| {
2722 repository_handle.load_commit_diff(envelope.payload.commit)
2723 })
2724 .await??;
2725 Ok(proto::LoadCommitDiffResponse {
2726 files: commit_diff
2727 .files
2728 .into_iter()
2729 .map(|file| proto::CommitFile {
2730 path: file.path.to_proto(),
2731 old_text: file.old_text,
2732 new_text: file.new_text,
2733 is_binary: file.is_binary,
2734 })
2735 .collect(),
2736 })
2737 }
2738
2739 async fn handle_file_history(
2740 this: Entity<Self>,
2741 envelope: TypedEnvelope<proto::GitFileHistory>,
2742 mut cx: AsyncApp,
2743 ) -> Result<proto::GitFileHistoryResponse> {
2744 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2745 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2746 let path = RepoPath::from_proto(&envelope.payload.path)?;
2747 let skip = envelope.payload.skip as usize;
2748 let limit = envelope.payload.limit.map(|l| l as usize);
2749
2750 let file_history = repository_handle
2751 .update(&mut cx, |repository_handle, _| {
2752 repository_handle.file_history_paginated(path, skip, limit)
2753 })
2754 .await??;
2755
2756 Ok(proto::GitFileHistoryResponse {
2757 entries: file_history
2758 .entries
2759 .into_iter()
2760 .map(|entry| proto::FileHistoryEntry {
2761 sha: entry.sha.to_string(),
2762 subject: entry.subject.to_string(),
2763 message: entry.message.to_string(),
2764 commit_timestamp: entry.commit_timestamp,
2765 author_name: entry.author_name.to_string(),
2766 author_email: entry.author_email.to_string(),
2767 })
2768 .collect(),
2769 path: file_history.path.to_proto(),
2770 })
2771 }
2772
2773 async fn handle_reset(
2774 this: Entity<Self>,
2775 envelope: TypedEnvelope<proto::GitReset>,
2776 mut cx: AsyncApp,
2777 ) -> Result<proto::Ack> {
2778 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2779 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2780
2781 let mode = match envelope.payload.mode() {
2782 git_reset::ResetMode::Soft => ResetMode::Soft,
2783 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2784 };
2785
2786 repository_handle
2787 .update(&mut cx, |repository_handle, cx| {
2788 repository_handle.reset(envelope.payload.commit, mode, cx)
2789 })
2790 .await??;
2791 Ok(proto::Ack {})
2792 }
2793
2794 async fn handle_checkout_files(
2795 this: Entity<Self>,
2796 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2797 mut cx: AsyncApp,
2798 ) -> Result<proto::Ack> {
2799 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2800 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2801 let paths = envelope
2802 .payload
2803 .paths
2804 .iter()
2805 .map(|s| RepoPath::from_proto(s))
2806 .collect::<Result<Vec<_>>>()?;
2807
2808 repository_handle
2809 .update(&mut cx, |repository_handle, cx| {
2810 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2811 })
2812 .await?;
2813 Ok(proto::Ack {})
2814 }
2815
2816 async fn handle_open_commit_message_buffer(
2817 this: Entity<Self>,
2818 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2819 mut cx: AsyncApp,
2820 ) -> Result<proto::OpenBufferResponse> {
2821 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2822 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2823 let buffer = repository
2824 .update(&mut cx, |repository, cx| {
2825 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2826 })
2827 .await?;
2828
2829 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
2830 this.update(&mut cx, |this, cx| {
2831 this.buffer_store.update(cx, |buffer_store, cx| {
2832 buffer_store
2833 .create_buffer_for_peer(
2834 &buffer,
2835 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2836 cx,
2837 )
2838 .detach_and_log_err(cx);
2839 })
2840 });
2841
2842 Ok(proto::OpenBufferResponse {
2843 buffer_id: buffer_id.to_proto(),
2844 })
2845 }
2846
2847 async fn handle_askpass(
2848 this: Entity<Self>,
2849 envelope: TypedEnvelope<proto::AskPassRequest>,
2850 mut cx: AsyncApp,
2851 ) -> Result<proto::AskPassResponse> {
2852 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2853 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2854
2855 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
2856 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2857 debug_panic!("no askpass found");
2858 anyhow::bail!("no askpass found");
2859 };
2860
2861 let response = askpass
2862 .ask_password(envelope.payload.prompt)
2863 .await
2864 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2865
2866 delegates
2867 .lock()
2868 .insert(envelope.payload.askpass_id, askpass);
2869
2870 // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
2871 Ok(proto::AskPassResponse {
2872 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2873 })
2874 }
2875
2876 async fn handle_check_for_pushed_commits(
2877 this: Entity<Self>,
2878 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2879 mut cx: AsyncApp,
2880 ) -> Result<proto::CheckForPushedCommitsResponse> {
2881 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2882 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2883
2884 let branches = repository_handle
2885 .update(&mut cx, |repository_handle, _| {
2886 repository_handle.check_for_pushed_commits()
2887 })
2888 .await??;
2889 Ok(proto::CheckForPushedCommitsResponse {
2890 pushed_to: branches
2891 .into_iter()
2892 .map(|commit| commit.to_string())
2893 .collect(),
2894 })
2895 }
2896
2897 async fn handle_git_diff(
2898 this: Entity<Self>,
2899 envelope: TypedEnvelope<proto::GitDiff>,
2900 mut cx: AsyncApp,
2901 ) -> Result<proto::GitDiffResponse> {
2902 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2903 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2904 let diff_type = match envelope.payload.diff_type() {
2905 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2906 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2907 proto::git_diff::DiffType::MergeBase => {
2908 let base_ref = envelope
2909 .payload
2910 .merge_base_ref
2911 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2912 DiffType::MergeBase {
2913 base_ref: base_ref.into(),
2914 }
2915 }
2916 };
2917
2918 let mut diff = repository_handle
2919 .update(&mut cx, |repository_handle, cx| {
2920 repository_handle.diff(diff_type, cx)
2921 })
2922 .await??;
2923 const ONE_MB: usize = 1_000_000;
2924 if diff.len() > ONE_MB {
2925 diff = diff.chars().take(ONE_MB).collect()
2926 }
2927
2928 Ok(proto::GitDiffResponse { diff })
2929 }
2930
2931 async fn handle_tree_diff(
2932 this: Entity<Self>,
2933 request: TypedEnvelope<proto::GetTreeDiff>,
2934 mut cx: AsyncApp,
2935 ) -> Result<proto::GetTreeDiffResponse> {
2936 let repository_id = RepositoryId(request.payload.repository_id);
2937 let diff_type = if request.payload.is_merge {
2938 DiffTreeType::MergeBase {
2939 base: request.payload.base.into(),
2940 head: request.payload.head.into(),
2941 }
2942 } else {
2943 DiffTreeType::Since {
2944 base: request.payload.base.into(),
2945 head: request.payload.head.into(),
2946 }
2947 };
2948
2949 let diff = this
2950 .update(&mut cx, |this, cx| {
2951 let repository = this.repositories().get(&repository_id)?;
2952 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2953 })
2954 .context("missing repository")?
2955 .await??;
2956
2957 Ok(proto::GetTreeDiffResponse {
2958 entries: diff
2959 .entries
2960 .into_iter()
2961 .map(|(path, status)| proto::TreeDiffStatus {
2962 path: path.as_ref().to_proto(),
2963 status: match status {
2964 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2965 TreeDiffStatus::Modified { .. } => {
2966 proto::tree_diff_status::Status::Modified.into()
2967 }
2968 TreeDiffStatus::Deleted { .. } => {
2969 proto::tree_diff_status::Status::Deleted.into()
2970 }
2971 },
2972 oid: match status {
2973 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2974 Some(old.to_string())
2975 }
2976 TreeDiffStatus::Added => None,
2977 },
2978 })
2979 .collect(),
2980 })
2981 }
2982
2983 async fn handle_get_blob_content(
2984 this: Entity<Self>,
2985 request: TypedEnvelope<proto::GetBlobContent>,
2986 mut cx: AsyncApp,
2987 ) -> Result<proto::GetBlobContentResponse> {
2988 let oid = git::Oid::from_str(&request.payload.oid)?;
2989 let repository_id = RepositoryId(request.payload.repository_id);
2990 let content = this
2991 .update(&mut cx, |this, cx| {
2992 let repository = this.repositories().get(&repository_id)?;
2993 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2994 })
2995 .context("missing repository")?
2996 .await?;
2997 Ok(proto::GetBlobContentResponse { content })
2998 }
2999
3000 async fn handle_open_unstaged_diff(
3001 this: Entity<Self>,
3002 request: TypedEnvelope<proto::OpenUnstagedDiff>,
3003 mut cx: AsyncApp,
3004 ) -> Result<proto::OpenUnstagedDiffResponse> {
3005 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3006 let diff = this
3007 .update(&mut cx, |this, cx| {
3008 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
3009 Some(this.open_unstaged_diff(buffer, cx))
3010 })
3011 .context("missing buffer")?
3012 .await?;
3013 this.update(&mut cx, |this, _| {
3014 let shared_diffs = this
3015 .shared_diffs
3016 .entry(request.original_sender_id.unwrap_or(request.sender_id))
3017 .or_default();
3018 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
3019 });
3020 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
3021 Ok(proto::OpenUnstagedDiffResponse { staged_text })
3022 }
3023
    /// Handles an `OpenUncommittedDiff` request: opens the uncommitted diff
    /// for the buffer, records it as shared with the requesting peer, and
    /// replies with the committed (HEAD) and staged (index) base texts.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Track the diff per requesting peer so it can receive later updates.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // Base text of the secondary diff, when it exists; it is used to
            // populate `staged_text` below.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Index and HEAD share the same underlying snapshot:
                        // send HEAD only and tell the peer the index matches it,
                        // avoiding sending the same text twice.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base text; send only whatever index text exists.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3084
    /// RPC handler: applies new diff base texts (index and/or HEAD) pushed by
    /// the host for a buffer this client has open.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            // Silently ignore updates for buffers we no longer track.
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
3103
    /// RPC handler: waits for the buffer to catch up to the requested
    /// version, then computes and returns git blame data for it.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Don't blame a stale buffer: block until the requester's edits have
        // been applied locally.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
3126
3127 async fn handle_get_permalink_to_line(
3128 this: Entity<Self>,
3129 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3130 mut cx: AsyncApp,
3131 ) -> Result<proto::GetPermalinkToLineResponse> {
3132 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3133 // let version = deserialize_version(&envelope.payload.version);
3134 let selection = {
3135 let proto_selection = envelope
3136 .payload
3137 .selection
3138 .context("no selection to get permalink for defined")?;
3139 proto_selection.start as u32..proto_selection.end as u32
3140 };
3141 let buffer = this.read_with(&cx, |this, cx| {
3142 this.buffer_store.read(cx).get_existing(buffer_id)
3143 })?;
3144 let permalink = this
3145 .update(&mut cx, |this, cx| {
3146 this.get_permalink_to_line(&buffer, selection, cx)
3147 })
3148 .await?;
3149 Ok(proto::GetPermalinkToLineResponse {
3150 permalink: permalink.to_string(),
3151 })
3152 }
3153
3154 fn repository_for_request(
3155 this: &Entity<Self>,
3156 id: RepositoryId,
3157 cx: &mut AsyncApp,
3158 ) -> Result<Entity<Repository>> {
3159 this.read_with(cx, |this, _| {
3160 this.repositories
3161 .get(&id)
3162 .context("missing repository handle")
3163 .cloned()
3164 })
3165 }
3166
3167 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3168 self.repositories
3169 .iter()
3170 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3171 .collect()
3172 }
3173
    /// Maps a batch of updated worktree entries to the repositories that
    /// contain them, returning repo-relative paths grouped per repository.
    ///
    /// When repositories are nested, each path is attributed only to its
    /// innermost containing repository. The matching work is performed on
    /// the background executor.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        entries.sort();
        let worktree = worktree.read(cx);

        // Absolutize up front so entries can be compared against the
        // repositories' absolute work-directory paths.
        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            // One task per repository, iterated in reverse sorted order so
            // deeper (more specific) work directories are processed first.
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3253}
3254
3255impl BufferGitState {
3256 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3257 Self {
3258 unstaged_diff: Default::default(),
3259 uncommitted_diff: Default::default(),
3260 oid_diffs: Default::default(),
3261 recalculate_diff_task: Default::default(),
3262 language: Default::default(),
3263 language_registry: Default::default(),
3264 recalculating_tx: postage::watch::channel_with(false).0,
3265 hunk_staging_operation_count: 0,
3266 hunk_staging_operation_count_as_of_write: 0,
3267 head_text: Default::default(),
3268 index_text: Default::default(),
3269 oid_texts: Default::default(),
3270 head_changed: Default::default(),
3271 index_changed: Default::default(),
3272 language_changed: Default::default(),
3273 conflict_updated_futures: Default::default(),
3274 conflict_set: Default::default(),
3275 reparse_conflict_markers_task: Default::default(),
3276 }
3277 }
3278
3279 #[ztracing::instrument(skip_all)]
3280 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3281 self.language = buffer.read(cx).language().cloned();
3282 self.language_changed = true;
3283 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3284 }
3285
    /// Re-parses conflict markers in `buffer` and pushes the result into the
    /// buffer's `ConflictSet`.
    ///
    /// The returned receiver fires once the conflict set has been updated.
    /// When there is no live conflict set, or it currently has no conflicts,
    /// the sender is dropped and the receiver resolves with `Canceled`
    /// instead — callers are expected to tolerate that.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when the set previously contained conflicts: the old
        // snapshot is needed to compute the changed range.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            // Parsing and comparing are pure text work; run them off the main
            // thread, then apply the result back on the entity.
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake every waiter, not just the caller that scheduled
                    // this particular task.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3337
3338 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3339 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3340 }
3341
3342 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3343 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3344 }
3345
3346 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3347 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3348 }
3349
3350 fn handle_base_texts_updated(
3351 &mut self,
3352 buffer: text::BufferSnapshot,
3353 message: proto::UpdateDiffBases,
3354 cx: &mut Context<Self>,
3355 ) {
3356 use proto::update_diff_bases::Mode;
3357
3358 let Some(mode) = Mode::from_i32(message.mode) else {
3359 return;
3360 };
3361
3362 let diff_bases_change = match mode {
3363 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3364 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3365 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3366 Mode::IndexAndHead => DiffBasesChange::SetEach {
3367 index: message.staged_text,
3368 head: message.committed_text,
3369 },
3370 };
3371
3372 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3373 }
3374
3375 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3376 if *self.recalculating_tx.borrow() {
3377 let mut rx = self.recalculating_tx.subscribe();
3378 Some(async move {
3379 loop {
3380 let is_recalculating = rx.recv().await;
3381 if is_recalculating != Some(true) {
3382 break;
3383 }
3384 }
3385 })
3386 } else {
3387 None
3388 }
3389 }
3390
3391 fn diff_bases_changed(
3392 &mut self,
3393 buffer: text::BufferSnapshot,
3394 diff_bases_change: Option<DiffBasesChange>,
3395 cx: &mut Context<Self>,
3396 ) {
3397 match diff_bases_change {
3398 Some(DiffBasesChange::SetIndex(index)) => {
3399 self.index_text = index.map(|mut index| {
3400 text::LineEnding::normalize(&mut index);
3401 Arc::from(index.as_str())
3402 });
3403 self.index_changed = true;
3404 }
3405 Some(DiffBasesChange::SetHead(head)) => {
3406 self.head_text = head.map(|mut head| {
3407 text::LineEnding::normalize(&mut head);
3408 Arc::from(head.as_str())
3409 });
3410 self.head_changed = true;
3411 }
3412 Some(DiffBasesChange::SetBoth(text)) => {
3413 let text = text.map(|mut text| {
3414 text::LineEnding::normalize(&mut text);
3415 Arc::from(text.as_str())
3416 });
3417 self.head_text = text.clone();
3418 self.index_text = text;
3419 self.head_changed = true;
3420 self.index_changed = true;
3421 }
3422 Some(DiffBasesChange::SetEach { index, head }) => {
3423 self.index_text = index.map(|mut index| {
3424 text::LineEnding::normalize(&mut index);
3425 Arc::from(index.as_str())
3426 });
3427 self.index_changed = true;
3428 self.head_text = head.map(|mut head| {
3429 text::LineEnding::normalize(&mut head);
3430 Arc::from(head.as_str())
3431 });
3432 self.head_changed = true;
3433 }
3434 None => {}
3435 }
3436
3437 self.recalculate_diffs(buffer, cx)
3438 }
3439
3440 #[ztracing::instrument(skip_all)]
3441 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3442 *self.recalculating_tx.borrow_mut() = true;
3443
3444 let language = self.language.clone();
3445 let language_registry = self.language_registry.clone();
3446 let unstaged_diff = self.unstaged_diff();
3447 let uncommitted_diff = self.uncommitted_diff();
3448 let head = self.head_text.clone();
3449 let index = self.index_text.clone();
3450 let index_changed = self.index_changed;
3451 let head_changed = self.head_changed;
3452 let language_changed = self.language_changed;
3453 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3454 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3455 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3456 (None, None) => true,
3457 _ => false,
3458 };
3459
3460 let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
3461 .oid_diffs
3462 .iter()
3463 .filter_map(|(oid, weak)| {
3464 let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
3465 weak.upgrade().map(|diff| (*oid, diff, base_text))
3466 })
3467 .collect();
3468
3469 self.oid_diffs.retain(|oid, weak| {
3470 let alive = weak.upgrade().is_some();
3471 if !alive {
3472 if let Some(oid) = oid {
3473 self.oid_texts.remove(oid);
3474 }
3475 }
3476 alive
3477 });
3478 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3479 log::debug!(
3480 "start recalculating diffs for buffer {}",
3481 buffer.remote_id()
3482 );
3483
3484 let mut new_unstaged_diff = None;
3485 if let Some(unstaged_diff) = &unstaged_diff {
3486 new_unstaged_diff = Some(
3487 cx.update(|cx| {
3488 unstaged_diff.read(cx).update_diff(
3489 buffer.clone(),
3490 index,
3491 index_changed.then_some(false),
3492 language.clone(),
3493 cx,
3494 )
3495 })
3496 .await,
3497 );
3498 }
3499
3500 // Dropping BufferDiff can be expensive, so yield back to the event loop
3501 // for a bit
3502 yield_now().await;
3503
3504 let mut new_uncommitted_diff = None;
3505 if let Some(uncommitted_diff) = &uncommitted_diff {
3506 new_uncommitted_diff = if index_matches_head {
3507 new_unstaged_diff.clone()
3508 } else {
3509 Some(
3510 cx.update(|cx| {
3511 uncommitted_diff.read(cx).update_diff(
3512 buffer.clone(),
3513 head,
3514 head_changed.then_some(true),
3515 language.clone(),
3516 cx,
3517 )
3518 })
3519 .await,
3520 )
3521 }
3522 }
3523
3524 // Dropping BufferDiff can be expensive, so yield back to the event loop
3525 // for a bit
3526 yield_now().await;
3527
3528 let cancel = this.update(cx, |this, _| {
3529 // This checks whether all pending stage/unstage operations
3530 // have quiesced (i.e. both the corresponding write and the
3531 // read of that write have completed). If not, then we cancel
3532 // this recalculation attempt to avoid invalidating pending
3533 // state too quickly; another recalculation will come along
3534 // later and clear the pending state once the state of the index has settled.
3535 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3536 *this.recalculating_tx.borrow_mut() = false;
3537 true
3538 } else {
3539 false
3540 }
3541 })?;
3542 if cancel {
3543 log::debug!(
3544 concat!(
3545 "aborting recalculating diffs for buffer {}",
3546 "due to subsequent hunk operations",
3547 ),
3548 buffer.remote_id()
3549 );
3550 return Ok(());
3551 }
3552
3553 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3554 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3555 {
3556 let task = unstaged_diff.update(cx, |diff, cx| {
3557 // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
3558 // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
3559 // view multibuffers.
3560 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3561 });
3562 Some(task.await)
3563 } else {
3564 None
3565 };
3566
3567 yield_now().await;
3568
3569 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3570 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3571 {
3572 uncommitted_diff
3573 .update(cx, |diff, cx| {
3574 if language_changed {
3575 diff.language_changed(language.clone(), language_registry, cx);
3576 }
3577 diff.set_snapshot_with_secondary(
3578 new_uncommitted_diff,
3579 &buffer,
3580 unstaged_changed_range.flatten(),
3581 true,
3582 cx,
3583 )
3584 })
3585 .await;
3586 }
3587
3588 yield_now().await;
3589
3590 for (oid, oid_diff, base_text) in oid_diffs {
3591 let new_oid_diff = cx
3592 .update(|cx| {
3593 oid_diff.read(cx).update_diff(
3594 buffer.clone(),
3595 base_text,
3596 None,
3597 language.clone(),
3598 cx,
3599 )
3600 })
3601 .await;
3602
3603 oid_diff
3604 .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
3605 .await;
3606
3607 log::debug!(
3608 "finished recalculating oid diff for buffer {} oid {:?}",
3609 buffer.remote_id(),
3610 oid
3611 );
3612
3613 yield_now().await;
3614 }
3615
3616 log::debug!(
3617 "finished recalculating diffs for buffer {}",
3618 buffer.remote_id()
3619 );
3620
3621 if let Some(this) = this.upgrade() {
3622 this.update(cx, |this, _| {
3623 this.index_changed = false;
3624 this.head_changed = false;
3625 this.language_changed = false;
3626 *this.recalculating_tx.borrow_mut() = false;
3627 });
3628 }
3629
3630 Ok(())
3631 }));
3632 }
3633}
3634
3635fn make_remote_delegate(
3636 this: Entity<GitStore>,
3637 project_id: u64,
3638 repository_id: RepositoryId,
3639 askpass_id: u64,
3640 cx: &mut AsyncApp,
3641) -> AskPassDelegate {
3642 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3643 this.update(cx, |this, cx| {
3644 let Some((client, _)) = this.downstream_client() else {
3645 return;
3646 };
3647 let response = client.request(proto::AskPassRequest {
3648 project_id,
3649 repository_id: repository_id.to_proto(),
3650 askpass_id,
3651 prompt,
3652 });
3653 cx.spawn(async move |_, _| {
3654 let mut response = response.await?.response;
3655 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3656 .ok();
3657 response.zeroize();
3658 anyhow::Ok(())
3659 })
3660 .detach_and_log_err(cx);
3661 });
3662 })
3663}
3664
3665impl RepositoryId {
3666 pub fn to_proto(self) -> u64 {
3667 self.0
3668 }
3669
3670 pub fn from_proto(id: u64) -> Self {
3671 RepositoryId(id)
3672 }
3673}
3674
3675impl RepositorySnapshot {
3676 fn empty(
3677 id: RepositoryId,
3678 work_directory_abs_path: Arc<Path>,
3679 original_repo_abs_path: Option<Arc<Path>>,
3680 path_style: PathStyle,
3681 ) -> Self {
3682 Self {
3683 id,
3684 statuses_by_path: Default::default(),
3685 original_repo_abs_path: original_repo_abs_path
3686 .unwrap_or_else(|| work_directory_abs_path.clone()),
3687 work_directory_abs_path,
3688 branch: None,
3689 head_commit: None,
3690 scan_id: 0,
3691 merge: Default::default(),
3692 remote_origin_url: None,
3693 remote_upstream_url: None,
3694 stash_entries: Default::default(),
3695 linked_worktrees: Arc::from([]),
3696 path_style,
3697 }
3698 }
3699
    /// Builds the full (non-incremental) wire update describing this
    /// snapshot from scratch.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            // In an initial payload every known status entry is "updated"
            // and nothing is removed.
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(repo_path, _)| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3741
    /// Computes an incremental wire update relative to `old`, containing
    /// only the status entries that were added, changed, or removed.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        // Merge-walk the two path-ordered status sequences to diff them in
        // a single pass.
        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Present only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Present in both: emit only if something changed.
                            if new_entry.status != old_entry.status
                                || new_entry.diff_stat != old_entry.diff_stat
                            {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Present only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .merge_heads_by_conflicted_path
                .iter()
                .map(|(path, _)| path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
            // Unlike `initial_update`, incremental updates carry no entry ids.
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
            remote_upstream_url: self.remote_upstream_url.clone(),
            remote_origin_url: self.remote_origin_url.clone(),
            original_repo_abs_path: Some(
                self.original_repo_abs_path.to_string_lossy().into_owned(),
            ),
            linked_worktrees: self
                .linked_worktrees
                .iter()
                .map(worktree_to_proto)
                .collect(),
        }
    }
3822
    /// The main worktree is the original checkout that other worktrees were
    /// created from.
    ///
    /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
    /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
    ///
    /// Submodules also return `true` here, since they are not linked worktrees.
    pub fn is_main_worktree(&self) -> bool {
        self.work_directory_abs_path == self.original_repo_abs_path
    }

    /// Returns true if this repository is a linked worktree, that is, one that
    /// was created from another worktree.
    ///
    /// Returns `false` for both the main worktree and submodules.
    pub fn is_linked_worktree(&self) -> bool {
        !self.is_main_worktree()
    }

    /// The git worktrees linked to this repository.
    pub fn linked_worktrees(&self) -> &[GitWorktree] {
        &self.linked_worktrees
    }

    /// Iterates over all file status entries, cloned out of the status tree.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// Aggregate git status summary across all recorded paths.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// The status entry for a single repo-relative path, if one is recorded.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }

    /// The `DiffStat` recorded for a path, if its status entry carries one.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.statuses_by_path
            .get(&PathKey(path.as_ref().clone()), ())
            .and_then(|entry| entry.diff_stat)
    }
3865
    /// Converts an absolute path into a path relative to this repository's
    /// work directory, if the path lies inside it.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }

    /// Joins a repo-relative path back onto the absolute work directory.
    ///
    /// NOTE(review): the `unwrap` assumes joining a relative repo path onto
    /// an absolute work directory cannot fail — confirm `PathStyle::join`'s
    /// failure conditions.
    fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
        self.path_style
            .join(&self.work_directory_abs_path, repo_path.as_std_path())
            .unwrap()
            .into()
    }
3876
3877 #[inline]
3878 fn abs_path_to_repo_path_inner(
3879 work_directory_abs_path: &Path,
3880 abs_path: &Path,
3881 path_style: PathStyle,
3882 ) -> Option<RepoPath> {
3883 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3884 Some(RepoPath::from_rel_path(&rel_path))
3885 }
3886
    /// Whether this path was recorded as conflicted the last time the merge
    /// heads changed (the record can persist after the conflict is resolved;
    /// see `MergeDetails::update`).
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge
            .merge_heads_by_conflicted_path
            .contains_key(repo_path)
    }
3892
3893 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3894 let had_conflict_on_last_merge_head_change = self
3895 .merge
3896 .merge_heads_by_conflicted_path
3897 .contains_key(repo_path);
3898 let has_conflict_currently = self
3899 .status_for_path(repo_path)
3900 .is_some_and(|entry| entry.status.is_conflicted());
3901 had_conflict_on_last_merge_head_change || has_conflict_currently
3902 }
3903
3904 /// This is the name that will be displayed in the repository selector for this repository.
3905 pub fn display_name(&self) -> SharedString {
3906 self.work_directory_abs_path
3907 .file_name()
3908 .unwrap_or_default()
3909 .to_string_lossy()
3910 .to_string()
3911 .into()
3912 }
3913}
3914
3915pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3916 proto::StashEntry {
3917 oid: entry.oid.as_bytes().to_vec(),
3918 message: entry.message.clone(),
3919 branch: entry.branch.clone(),
3920 index: entry.index as u64,
3921 timestamp: entry.timestamp,
3922 }
3923}
3924
3925pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3926 Ok(StashEntry {
3927 oid: Oid::from_bytes(&entry.oid)?,
3928 message: entry.message.clone(),
3929 index: entry.index as usize,
3930 branch: entry.branch.clone(),
3931 timestamp: entry.timestamp,
3932 })
3933}
3934
3935impl MergeDetails {
3936 async fn update(
3937 &mut self,
3938 backend: &Arc<dyn GitRepository>,
3939 current_conflicted_paths: Vec<RepoPath>,
3940 ) -> Result<bool> {
3941 log::debug!("load merge details");
3942 self.message = backend.merge_message().await.map(SharedString::from);
3943 let heads = backend
3944 .revparse_batch(vec![
3945 "MERGE_HEAD".into(),
3946 "CHERRY_PICK_HEAD".into(),
3947 "REBASE_HEAD".into(),
3948 "REVERT_HEAD".into(),
3949 "APPLY_HEAD".into(),
3950 ])
3951 .await
3952 .log_err()
3953 .unwrap_or_default()
3954 .into_iter()
3955 .map(|opt| opt.map(SharedString::from))
3956 .collect::<Vec<_>>();
3957
3958 let mut conflicts_changed = false;
3959
3960 // Record the merge state for newly conflicted paths
3961 for path in ¤t_conflicted_paths {
3962 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3963 conflicts_changed = true;
3964 self.merge_heads_by_conflicted_path
3965 .insert(path.clone(), heads.clone());
3966 }
3967 }
3968
3969 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3970 self.merge_heads_by_conflicted_path
3971 .retain(|path, old_merge_heads| {
3972 let keep = current_conflicted_paths.contains(path)
3973 || (old_merge_heads == &heads
3974 && old_merge_heads.iter().any(|head| head.is_some()));
3975 if !keep {
3976 conflicts_changed = true;
3977 }
3978 keep
3979 });
3980
3981 Ok(conflicts_changed)
3982 }
3983}
3984
3985impl Repository {
3986 pub fn is_trusted(&self) -> bool {
3987 match self.repository_state.peek() {
3988 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3989 _ => false,
3990 }
3991 }
3992
    /// Returns a clone of the repository's current immutable snapshot.
    pub fn snapshot(&self) -> RepositorySnapshot {
        self.snapshot.clone()
    }

    /// Iterates over all pending git operations, cloned out of the
    /// underlying tree.
    pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
        self.pending_ops.iter().cloned()
    }

    /// Aggregate summary over all pending operations.
    pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
        self.pending_ops.summary().clone()
    }

    /// The pending operations recorded for a single repo-relative path,
    /// if any.
    pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
        self.pending_ops
            .get(&PathKey(path.as_ref().clone()), ())
            .cloned()
    }
4010
    /// Constructs a repository backed by a local git checkout.
    ///
    /// The backend is initialized asynchronously; `repository_state` is a
    /// `Shared` future that git jobs await before touching the repository.
    /// A self-subscription clears cached commit-graph data when the branch
    /// changes after the initial scan.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // The error is stringified so the `Shared` future's output can be
        // cloned (anyhow errors are not cloneable).
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                // Only clear after the initial scan (scan_id > 1).
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4078
    /// Constructs a repository handle backed by a remote peer; git
    /// operations are forwarded over RPC via `client`.
    fn remote(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Option<Arc<Path>>,
        path_style: PathStyle,
        project_id: ProjectId,
        client: AnyProtoClient,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path,
            original_repo_abs_path,
            path_style,
        );
        let repository_state = RemoteRepositoryState { project_id, client };
        let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
        // Unlike the local case, the remote state is available immediately.
        let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
        Self {
            this: cx.weak_entity(),
            snapshot,
            commit_message_buffer: None,
            git_store,
            pending_ops: Default::default(),
            paths_needing_status_update: Default::default(),
            job_sender,
            repository_state,
            askpass_delegates: Default::default(),
            latest_askpass_id: 0,
            active_jobs: Default::default(),
            job_id: 0,
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
4116
    /// Upgrades the weak back-reference to the owning `GitStore`, if it is
    /// still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4120
    /// Recomputes the index and HEAD diff bases for every open buffer that
    /// belongs to this repository, then applies any changes to the buffers'
    /// diff state and mirrors them to a downstream client, if any.
    ///
    /// The job is keyed on `GitJobKey::ReloadBufferDiffBases` so that repeated
    /// requests can be coalesced by the worker.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                // Diff bases can only be reloaded from a local git backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // On the main thread: gather, for each open buffer in this
                // repository, which diff bases (index and/or HEAD) are
                // currently tracked and therefore need a reload.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                // Skip buffers that don't belong to this repository.
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    // Only reload the base texts of diffs that
                                    // are still alive (upgradable).
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // In the background: load fresh index/HEAD texts and compute
                // the minimal `DiffBasesChange` for each buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // Compare the freshly loaded texts to the cached
                            // ones and describe only what actually changed.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // When both bases became identical,
                                            // send the text once.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Back on the main thread: apply each change to the buffer's
                // diff state and forward it to the downstream client, if any.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                // Translate the change into the wire format.
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4280
    /// Enqueues `job` on this repository's git worker without a coalescing
    /// key; see [`Self::send_keyed_job`] for the full semantics. `status`, if
    /// provided, is displayed while the job runs.
    ///
    /// The returned receiver yields the job's result, or an error if the job
    /// never ran (e.g. the worker was dropped).
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4293
    /// Enqueues a git job on this repository's worker.
    ///
    /// * `key` — optional coalescing key, interpreted by the worker that
    ///   drains the queue.
    /// * `status` — optional human-readable message; while the job runs it is
    ///   recorded in `active_jobs` (with `cx.notify()` fired) so the UI can
    ///   show progress.
    ///
    /// The job closure receives the resolved [`RepositoryState`] plus an
    /// `AsyncApp`; its result is delivered through the returned oneshot
    /// receiver. If the worker is gone the send fails silently and the
    /// receiver resolves with a cancellation error.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Each job gets a unique id so it can be tracked in `active_jobs`.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job (and its status message) as active
                        // for the duration of its execution.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may already be dropped; that's fine.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4343
4344 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4345 let Some(git_store) = self.git_store.upgrade() else {
4346 return;
4347 };
4348 let entity = cx.entity();
4349 git_store.update(cx, |git_store, cx| {
4350 let Some((&id, _)) = git_store
4351 .repositories
4352 .iter()
4353 .find(|(_, handle)| *handle == &entity)
4354 else {
4355 return;
4356 };
4357 git_store.active_repo_id = Some(id);
4358 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4359 });
4360 }
4361
    /// Iterates the file statuses from the most recent snapshot, without
    /// touching the git backend.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4365
    /// Returns the cached diff statistics for `path`, if the current snapshot
    /// has any.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4369
    /// Returns a clone of the stash entries from the most recent snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4373
4374 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4375 let git_store = self.git_store.upgrade()?;
4376 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4377 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4378 let abs_path = SanitizedPath::new(&abs_path);
4379 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4380 Some(ProjectPath {
4381 worktree_id: worktree.read(cx).id(),
4382 path: relative_path,
4383 })
4384 }
4385
    /// Converts a project path (worktree id + relative path) into a path
    /// relative to this repository's work directory. Returns `None` if the
    /// git store is gone, the path cannot be absolutized, or the result lies
    /// outside this repository.
    pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
        let git_store = self.git_store.upgrade()?;
        let worktree_store = git_store.read(cx).worktree_store.read(cx);
        let abs_path = worktree_store.absolutize(path, cx)?;
        self.snapshot.abs_path_to_repo_path(&abs_path)
    }
4392
4393 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4394 other
4395 .read(cx)
4396 .snapshot
4397 .work_directory_abs_path
4398 .starts_with(&self.snapshot.work_directory_abs_path)
4399 }
4400
    /// Returns the buffer used for composing commit messages, creating it on
    /// first use and caching it in `commit_message_buffer`.
    ///
    /// Locally the buffer is created in `buffer_store`; for a remote
    /// repository it is opened on the host and awaited here. In both cases
    /// the buffer gets the "Git Commit" language when `languages` is provided.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Reuse the cached buffer if one was already opened.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Ask the host to open (or reuse) its commit buffer, then
                    // wait for the replica to arrive in our buffer store.
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache for subsequent calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4453
4454 fn open_local_commit_buffer(
4455 language_registry: Option<Arc<LanguageRegistry>>,
4456 buffer_store: Entity<BufferStore>,
4457 cx: &mut Context<Self>,
4458 ) -> Task<Result<Entity<Buffer>>> {
4459 cx.spawn(async move |repository, cx| {
4460 let git_commit_language = match language_registry {
4461 Some(language_registry) => {
4462 Some(language_registry.language_for_name("Git Commit").await?)
4463 }
4464 None => None,
4465 };
4466 let buffer = buffer_store
4467 .update(cx, |buffer_store, cx| {
4468 buffer_store.create_buffer(git_commit_language, false, cx)
4469 })
4470 .await?;
4471
4472 repository.update(cx, |repository, _| {
4473 repository.commit_message_buffer = Some(buffer.clone());
4474 })?;
4475 Ok(buffer)
4476 })
4477 }
4478
    /// Restores `paths` to their contents at `commit` (the equivalent of
    /// `git checkout <commit> -- <paths>`), tracking the affected paths as
    /// pending "Reverted" operations while the job runs.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        // Shown as the job's status while it runs.
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                // Remote repositories forward the request to
                                // the host over the protocol.
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4533
    /// Resets the repository to `commit` with the given mode (soft keeps the
    /// index, mixed resets it). Remote repositories forward the request to
    /// the host.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            // Translate to the wire representation of the mode.
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4567
    /// Loads the details (sha, message, author, timestamp) of a single commit,
    /// locally via the backend or remotely via the host.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    // Reconstruct the details from the wire response.
                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4595
    /// Loads the full diff of a commit (old/new text per file), locally via
    /// the backend or remotely via the host.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    // Deserialize each file entry; fail on the first invalid path.
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4631
    /// Loads the commit history for `path` from the beginning, with no limit.
    /// Convenience wrapper around [`Self::file_history_paginated`].
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4638
    /// Loads a page of the commit history for `path`, skipping the first
    /// `skip` entries and returning at most `limit` (all remaining when
    /// `None`). Remote repositories forward the request to the host.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    // Reconstruct the history entries from the wire response.
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4680
    /// Returns the cached initial graph data for the given log source/order,
    /// if it has been requested before (see [`Self::graph_data`]). Does not
    /// trigger loading.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4688
4689 pub fn search_commits(
4690 &mut self,
4691 log_source: LogSource,
4692 search_args: SearchCommitArgs,
4693 request_tx: smol::channel::Sender<Oid>,
4694 cx: &mut Context<Self>,
4695 ) {
4696 let repository_state = self.repository_state.clone();
4697
4698 cx.background_spawn(async move {
4699 let repo_state = repository_state.await;
4700
4701 match repo_state {
4702 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4703 backend
4704 .search_commits(log_source, search_args, request_tx)
4705 .await
4706 .log_err();
4707 }
4708 Ok(RepositoryState::Remote(_)) => {}
4709 Err(_) => {}
4710 };
4711 })
4712 .detach();
4713 }
4714
    /// Returns the currently-loaded slice of graph commits for the given log
    /// source/order, starting a background fetch on first request.
    ///
    /// The requested `range` is clamped to the data loaded so far, so callers
    /// may ask for ranges beyond the current end while loading is in
    /// progress; `is_loading` reports whether the fetch task is still running.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                // First request for this (source, order): kick off the fetch.
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record a fetch failure on the entry so the UI can show it.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the commits loaded so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
4784
    /// Streams the initial graph commit data for a local repository into
    /// `initial_graph_data`, emitting `GitGraphEvent::CountUpdated` as batches
    /// arrive. Returns once the backend has finished producing data.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        // The backend pushes batches of commits through this channel.
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches as they arrive and append them to the cached graph
        // data, keeping the oid -> index map in sync.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                // The entry should exist for as long as this task is alive.
                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        // Propagate any error from the backend task.
        task.await?;
        Ok(())
    }
4838
    /// Requests the detailed data for commit `sha`, returning its current
    /// state (`Loading` until the handler delivers it).
    ///
    /// If the commit-data handler is closed, this opens it but does not
    /// enqueue `sha` yet — NOTE(review): the request appears to rely on the
    /// caller invoking this again once the handler is `Open`; confirm callers
    /// poll (e.g. on notify).
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    // Mark as loading only if the request was actually queued.
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                // Handler is spinning up; nothing to do until it's open.
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4859
    /// Spins up the pair of tasks that service commit-data requests for the
    /// git graph:
    ///
    /// * a background task that owns the backend's commit-data reader,
    ///   answering requests from `request_rx` and shutting itself down after
    ///   10 seconds of inactivity;
    /// * a foreground task that stores each result in `commit_data` and marks
    ///   the handler `Closed` once the background task is gone.
    ///
    /// Only local repositories are supported; for remote ones the background
    /// task logs an error and exits.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        // Results are bounded to apply backpressure on the reader.
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    // The repository entity is gone; stop consuming results.
                    break;
                }
            }

            // The result channel closed (reader exited or timed out): mark the
            // handler closed so the next request reopens it.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: shut the reader down after 10s without requests.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Closing the result channel lets the foreground task finish and
            // flip the handler back to Closed.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4950
    /// Returns the buffer store from the owning git store, or `None` if the
    /// store has been dropped.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
4954
4955 fn save_buffers<'a>(
4956 &self,
4957 entries: impl IntoIterator<Item = &'a RepoPath>,
4958 cx: &mut Context<Self>,
4959 ) -> Vec<Task<anyhow::Result<()>>> {
4960 let mut save_futures = Vec::new();
4961 if let Some(buffer_store) = self.buffer_store(cx) {
4962 buffer_store.update(cx, |buffer_store, cx| {
4963 for path in entries {
4964 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4965 continue;
4966 };
4967 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4968 && buffer
4969 .read(cx)
4970 .file()
4971 .is_some_and(|file| file.disk_state().exists())
4972 && buffer.read(cx).has_unsaved_edits()
4973 {
4974 save_futures.push(buffer_store.save_buffer(buffer, cx));
4975 }
4976 }
4977 })
4978 }
4979 save_futures
4980 }
4981
    /// Stages the given paths (`git add`). See
    /// [`Self::stage_or_unstage_entries`] for the full behavior.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4989
    /// Unstages the given paths (`git reset`). See
    /// [`Self::stage_or_unstage_entries`] for the full behavior.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4997
    /// Stages (`stage == true`) or unstages the given paths.
    ///
    /// Before touching the index this saves any dirty buffers for the
    /// affected paths, then optimistically stages/unstages all hunks in their
    /// uncommitted diffs so the UI updates immediately. The actual index write
    /// runs as a keyed job (`GitJobKey::WriteIndex`); on failure the pending
    /// hunk state is rolled back. The affected paths are tracked as pending
    /// operations for the duration of the job.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line, e.g. "git add a.rs b.rs".
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Persist buffer contents first so the index write sees them.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip all hunks in each affected
                            // buffer's uncommitted diff and remember the
                            // operation count so we can reconcile later.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write, locally or via
                            // the host for a remote repository.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Reconcile the optimistic hunk updates: record the
                            // counts on success, or roll the pending hunks back
                            // on failure.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5182
    /// Stages every entry that is not already fully staged, skipping entries
    /// with a pending or completed staging operation. The filtering happens on
    /// a background thread against a snapshot of the current status.
    pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let snapshot = self.snapshot.clone();
        let pending_ops = self.pending_ops.clone();
        let to_stage = cx.background_spawn(async move {
            snapshot
                .status()
                .filter_map(|entry| {
                    if let Some(ops) =
                        pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
                    {
                        // Pending ops take precedence over the cached status.
                        if ops.staging() || ops.staged() {
                            None
                        } else {
                            Some(entry.repo_path)
                        }
                    } else if entry.status.staging().is_fully_staged() {
                        None
                    } else {
                        Some(entry.repo_path)
                    }
                })
                .collect()
        });

        cx.spawn(async move |this, cx| {
            let to_stage = to_stage.await;
            this.update(cx, |this, cx| {
                this.stage_or_unstage_entries(true, to_stage, cx)
            })?
            .await
        })
    }
5215
    /// Unstages every entry that is not already fully unstaged, skipping
    /// entries whose pending operations indicate nothing is staged. Mirrors
    /// [`Self::stage_all`] with the conditions inverted.
    pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let snapshot = self.snapshot.clone();
        let pending_ops = self.pending_ops.clone();
        let to_unstage = cx.background_spawn(async move {
            snapshot
                .status()
                .filter_map(|entry| {
                    if let Some(ops) =
                        pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
                    {
                        // Pending ops take precedence over the cached status.
                        if !ops.staging() && !ops.staged() {
                            None
                        } else {
                            Some(entry.repo_path)
                        }
                    } else if entry.status.staging().is_fully_unstaged() {
                        None
                    } else {
                        Some(entry.repo_path)
                    }
                })
                .collect()
        });

        cx.spawn(async move |this, cx| {
            let to_unstage = to_unstage.await;
            this.update(cx, |this, cx| {
                this.stage_or_unstage_entries(false, to_unstage, cx)
            })?
            .await
        })
    }
5248
    /// Stashes every path that currently appears in the cached status.
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5254
    /// Stashes the given paths, locally via the backend or remotely via the
    /// host. The double `?` unwraps the job-send result and then the job's
    /// own result.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5291
    /// Pops a stash entry (applies it and removes it from the stash).
    /// `index` selects a specific entry; `None` uses the default (the latest).
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5325
5326 pub fn stash_apply(
5327 &mut self,
5328 index: Option<usize>,
5329 cx: &mut Context<Self>,
5330 ) -> Task<anyhow::Result<()>> {
5331 let id = self.id;
5332 cx.spawn(async move |this, cx| {
5333 this.update(cx, |this, _| {
5334 this.send_job(None, move |git_repo, _cx| async move {
5335 match git_repo {
5336 RepositoryState::Local(LocalRepositoryState {
5337 backend,
5338 environment,
5339 ..
5340 }) => backend.stash_apply(index, environment).await,
5341 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5342 client
5343 .request(proto::StashApply {
5344 project_id: project_id.0,
5345 repository_id: id.to_proto(),
5346 stash_index: index.map(|i| i as u64),
5347 })
5348 .await
5349 .context("sending stash apply request")?;
5350 Ok(())
5351 }
5352 }
5353 })
5354 })?
5355 .await??;
5356 Ok(())
5357 })
5358 }
5359
5360 pub fn stash_drop(
5361 &mut self,
5362 index: Option<usize>,
5363 cx: &mut Context<Self>,
5364 ) -> oneshot::Receiver<anyhow::Result<()>> {
5365 let id = self.id;
5366 let updates_tx = self
5367 .git_store()
5368 .and_then(|git_store| match &git_store.read(cx).state {
5369 GitStoreState::Local { downstream, .. } => downstream
5370 .as_ref()
5371 .map(|downstream| downstream.updates_tx.clone()),
5372 _ => None,
5373 });
5374 let this = cx.weak_entity();
5375 self.send_job(None, move |git_repo, mut cx| async move {
5376 match git_repo {
5377 RepositoryState::Local(LocalRepositoryState {
5378 backend,
5379 environment,
5380 ..
5381 }) => {
5382 // TODO would be nice to not have to do this manually
5383 let result = backend.stash_drop(index, environment).await;
5384 if result.is_ok()
5385 && let Ok(stash_entries) = backend.stash_entries().await
5386 {
5387 let snapshot = this.update(&mut cx, |this, cx| {
5388 this.snapshot.stash_entries = stash_entries;
5389 cx.emit(RepositoryEvent::StashEntriesChanged);
5390 this.snapshot.clone()
5391 })?;
5392 if let Some(updates_tx) = updates_tx {
5393 updates_tx
5394 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5395 .ok();
5396 }
5397 }
5398
5399 result
5400 }
5401 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5402 client
5403 .request(proto::StashDrop {
5404 project_id: project_id.0,
5405 repository_id: id.to_proto(),
5406 stash_index: index.map(|i| i as u64),
5407 })
5408 .await
5409 .context("sending stash pop request")?;
5410 Ok(())
5411 }
5412 }
5413 })
5414 }
5415
5416 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
5417 let id = self.id;
5418 self.send_job(
5419 Some(format!("git hook {}", hook.as_str()).into()),
5420 move |git_repo, _cx| async move {
5421 match git_repo {
5422 RepositoryState::Local(LocalRepositoryState {
5423 backend,
5424 environment,
5425 ..
5426 }) => backend.run_hook(hook, environment.clone()).await,
5427 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5428 client
5429 .request(proto::RunGitHook {
5430 project_id: project_id.0,
5431 repository_id: id.to_proto(),
5432 hook: hook.to_proto(),
5433 })
5434 .await?;
5435
5436 Ok(())
5437 }
5438 }
5439 },
5440 )
5441 }
5442
    /// Commits the staged changes with the given message and options.
    ///
    /// The pre-commit hook is queued first; the commit job awaits its result
    /// and aborts if the hook fails. For remote repositories, the askpass
    /// delegate is registered under a fresh id so the host can route
    /// credential prompts back to this client, and is removed again when the
    /// request completes.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook before the commit job itself.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Double `?`: the outer propagates a dropped channel, the inner a
            // hook failure.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate even if the request errors out.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5497
    /// Fetches according to `fetch_options`.
    ///
    /// For remote repositories, the askpass delegate is registered under a
    /// fresh id for the duration of the request so credential prompts from
    /// the host are routed back to this client; the defer guard removes it
    /// when the request finishes.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate even if the request errors out.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5539
5540 pub fn push(
5541 &mut self,
5542 branch: SharedString,
5543 remote_branch: SharedString,
5544 remote: SharedString,
5545 options: Option<PushOptions>,
5546 askpass: AskPassDelegate,
5547 cx: &mut Context<Self>,
5548 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5549 let askpass_delegates = self.askpass_delegates.clone();
5550 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5551 let id = self.id;
5552
5553 let args = options
5554 .map(|option| match option {
5555 PushOptions::SetUpstream => " --set-upstream",
5556 PushOptions::Force => " --force-with-lease",
5557 })
5558 .unwrap_or("");
5559
5560 let updates_tx = self
5561 .git_store()
5562 .and_then(|git_store| match &git_store.read(cx).state {
5563 GitStoreState::Local { downstream, .. } => downstream
5564 .as_ref()
5565 .map(|downstream| downstream.updates_tx.clone()),
5566 _ => None,
5567 });
5568
5569 let this = cx.weak_entity();
5570 self.send_job(
5571 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
5572 move |git_repo, mut cx| async move {
5573 match git_repo {
5574 RepositoryState::Local(LocalRepositoryState {
5575 backend,
5576 environment,
5577 ..
5578 }) => {
5579 let result = backend
5580 .push(
5581 branch.to_string(),
5582 remote_branch.to_string(),
5583 remote.to_string(),
5584 options,
5585 askpass,
5586 environment.clone(),
5587 cx.clone(),
5588 )
5589 .await;
5590 // TODO would be nice to not have to do this manually
5591 if result.is_ok() {
5592 let branches = backend.branches().await?;
5593 let branch = branches.into_iter().find(|branch| branch.is_head);
5594 log::info!("head branch after scan is {branch:?}");
5595 let snapshot = this.update(&mut cx, |this, cx| {
5596 this.snapshot.branch = branch;
5597 cx.emit(RepositoryEvent::BranchChanged);
5598 this.snapshot.clone()
5599 })?;
5600 if let Some(updates_tx) = updates_tx {
5601 updates_tx
5602 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5603 .ok();
5604 }
5605 }
5606 result
5607 }
5608 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5609 askpass_delegates.lock().insert(askpass_id, askpass);
5610 let _defer = util::defer(|| {
5611 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5612 debug_assert!(askpass_delegate.is_some());
5613 });
5614 let response = client
5615 .request(proto::Push {
5616 project_id: project_id.0,
5617 repository_id: id.to_proto(),
5618 askpass_id,
5619 branch_name: branch.to_string(),
5620 remote_branch_name: remote_branch.to_string(),
5621 remote_name: remote.to_string(),
5622 options: options.map(|options| match options {
5623 PushOptions::Force => proto::push::PushOptions::Force,
5624 PushOptions::SetUpstream => {
5625 proto::push::PushOptions::SetUpstream
5626 }
5627 }
5628 as i32),
5629 })
5630 .await?;
5631
5632 Ok(RemoteCommandOutput {
5633 stdout: response.stdout,
5634 stderr: response.stderr,
5635 })
5636 }
5637 }
5638 },
5639 )
5640 }
5641
    /// Pulls `branch` (or the backend's default when `None`) from `remote`,
    /// optionally with `--rebase`.
    ///
    /// The status string mirrors the equivalent git command line. For remote
    /// repositories, the askpass delegate is registered for the duration of
    /// the request so credential prompts reach this client.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build a human-readable status string resembling the git invocation.
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate even if the request errors out.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5706
    /// Queues a job that rewrites the index entry for `path`.
    ///
    /// `content` of `None` clears the staged text for the path; otherwise the
    /// given text becomes the index content. The job is keyed on the path
    /// (`GitJobKey::WriteIndex`) — presumably so index writes for the same
    /// path are serialized rather than interleaved; see `send_keyed_job`.
    ///
    /// When `hunk_staging_operation_count` is provided, it is recorded on the
    /// corresponding buffer's diff state after the write completes, so that
    /// in-flight hunk-staging operations can be reconciled against it.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the executable bit of the on-disk file; a
                        // missing file or metadata error defaults to false.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Locate the open buffer for this path (if any) and stamp
                    // its diff state with the operation count as of this write.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5785
5786 pub fn create_remote(
5787 &mut self,
5788 remote_name: String,
5789 remote_url: String,
5790 ) -> oneshot::Receiver<Result<()>> {
5791 let id = self.id;
5792 self.send_job(
5793 Some(format!("git remote add {remote_name} {remote_url}").into()),
5794 move |repo, _cx| async move {
5795 match repo {
5796 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5797 backend.create_remote(remote_name, remote_url).await
5798 }
5799 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5800 client
5801 .request(proto::GitCreateRemote {
5802 project_id: project_id.0,
5803 repository_id: id.to_proto(),
5804 remote_name,
5805 remote_url,
5806 })
5807 .await?;
5808
5809 Ok(())
5810 }
5811 }
5812 },
5813 )
5814 }
5815
5816 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5817 let id = self.id;
5818 self.send_job(
5819 Some(format!("git remove remote {remote_name}").into()),
5820 move |repo, _cx| async move {
5821 match repo {
5822 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5823 backend.remove_remote(remote_name).await
5824 }
5825 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5826 client
5827 .request(proto::GitRemoveRemote {
5828 project_id: project_id.0,
5829 repository_id: id.to_proto(),
5830 remote_name,
5831 })
5832 .await?;
5833
5834 Ok(())
5835 }
5836 }
5837 },
5838 )
5839 }
5840
5841 pub fn get_remotes(
5842 &mut self,
5843 branch_name: Option<String>,
5844 is_push: bool,
5845 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5846 let id = self.id;
5847 self.send_job(None, move |repo, _cx| async move {
5848 match repo {
5849 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5850 let remote = if let Some(branch_name) = branch_name {
5851 if is_push {
5852 backend.get_push_remote(branch_name).await?
5853 } else {
5854 backend.get_branch_remote(branch_name).await?
5855 }
5856 } else {
5857 None
5858 };
5859
5860 match remote {
5861 Some(remote) => Ok(vec![remote]),
5862 None => backend.get_all_remotes().await,
5863 }
5864 }
5865 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5866 let response = client
5867 .request(proto::GetRemotes {
5868 project_id: project_id.0,
5869 repository_id: id.to_proto(),
5870 branch_name,
5871 is_push,
5872 })
5873 .await?;
5874
5875 let remotes = response
5876 .remotes
5877 .into_iter()
5878 .map(|remotes| Remote {
5879 name: remotes.name.into(),
5880 })
5881 .collect();
5882
5883 Ok(remotes)
5884 }
5885 }
5886 })
5887 }
5888
5889 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5890 let id = self.id;
5891 self.send_job(None, move |repo, _| async move {
5892 match repo {
5893 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5894 backend.branches().await
5895 }
5896 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5897 let response = client
5898 .request(proto::GitGetBranches {
5899 project_id: project_id.0,
5900 repository_id: id.to_proto(),
5901 })
5902 .await?;
5903
5904 let branches = response
5905 .branches
5906 .into_iter()
5907 .map(|branch| proto_to_branch(&branch))
5908 .collect();
5909
5910 Ok(branches)
5911 }
5912 }
5913 })
5914 }
5915
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
5927
5928 pub fn path_for_new_linked_worktree(
5929 &self,
5930 branch_name: &str,
5931 worktree_directory_setting: &str,
5932 ) -> Result<PathBuf> {
5933 let original_repo = self.original_repo_abs_path.clone();
5934 let project_name = original_repo
5935 .file_name()
5936 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
5937 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
5938 Ok(directory.join(branch_name).join(project_name))
5939 }
5940
5941 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5942 let id = self.id;
5943 self.send_job(None, move |repo, _| async move {
5944 match repo {
5945 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5946 backend.worktrees().await
5947 }
5948 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5949 let response = client
5950 .request(proto::GitGetWorktrees {
5951 project_id: project_id.0,
5952 repository_id: id.to_proto(),
5953 })
5954 .await?;
5955
5956 let worktrees = response
5957 .worktrees
5958 .into_iter()
5959 .map(|worktree| proto_to_worktree(&worktree))
5960 .collect();
5961
5962 Ok(worktrees)
5963 }
5964 }
5965 })
5966 }
5967
5968 pub fn create_worktree(
5969 &mut self,
5970 branch_name: String,
5971 path: PathBuf,
5972 commit: Option<String>,
5973 ) -> oneshot::Receiver<Result<()>> {
5974 let id = self.id;
5975 self.send_job(
5976 Some(format!("git worktree add: {}", branch_name).into()),
5977 move |repo, _cx| async move {
5978 match repo {
5979 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5980 backend.create_worktree(branch_name, path, commit).await
5981 }
5982 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5983 client
5984 .request(proto::GitCreateWorktree {
5985 project_id: project_id.0,
5986 repository_id: id.to_proto(),
5987 name: branch_name,
5988 directory: path.to_string_lossy().to_string(),
5989 commit,
5990 })
5991 .await?;
5992
5993 Ok(())
5994 }
5995 }
5996 },
5997 )
5998 }
5999
6000 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
6001 let id = self.id;
6002 self.send_job(
6003 Some(format!("git worktree remove: {}", path.display()).into()),
6004 move |repo, _cx| async move {
6005 match repo {
6006 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6007 backend.remove_worktree(path, force).await
6008 }
6009 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6010 client
6011 .request(proto::GitRemoveWorktree {
6012 project_id: project_id.0,
6013 repository_id: id.to_proto(),
6014 path: path.to_string_lossy().to_string(),
6015 force,
6016 })
6017 .await?;
6018
6019 Ok(())
6020 }
6021 }
6022 },
6023 )
6024 }
6025
6026 pub fn rename_worktree(
6027 &mut self,
6028 old_path: PathBuf,
6029 new_path: PathBuf,
6030 ) -> oneshot::Receiver<Result<()>> {
6031 let id = self.id;
6032 self.send_job(
6033 Some(format!("git worktree move: {}", old_path.display()).into()),
6034 move |repo, _cx| async move {
6035 match repo {
6036 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6037 backend.rename_worktree(old_path, new_path).await
6038 }
6039 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6040 client
6041 .request(proto::GitRenameWorktree {
6042 project_id: project_id.0,
6043 repository_id: id.to_proto(),
6044 old_path: old_path.to_string_lossy().to_string(),
6045 new_path: new_path.to_string_lossy().to_string(),
6046 })
6047 .await?;
6048
6049 Ok(())
6050 }
6051 }
6052 },
6053 )
6054 }
6055
6056 pub fn default_branch(
6057 &mut self,
6058 include_remote_name: bool,
6059 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
6060 let id = self.id;
6061 self.send_job(None, move |repo, _| async move {
6062 match repo {
6063 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6064 backend.default_branch(include_remote_name).await
6065 }
6066 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6067 let response = client
6068 .request(proto::GetDefaultBranch {
6069 project_id: project_id.0,
6070 repository_id: id.to_proto(),
6071 })
6072 .await?;
6073
6074 anyhow::Ok(response.branch.map(SharedString::from))
6075 }
6076 }
6077 })
6078 }
6079
    /// Computes a tree-level diff (per-path statuses) for `diff_type`.
    ///
    /// Remote responses are converted from protobuf; entries whose oid or
    /// path fails to parse are logged (`log_err`) and skipped rather than
    /// failing the whole diff.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            // Modified/Deleted carry the old blob oid; Added
                            // has none.
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6139
6140 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6141 let id = self.id;
6142 self.send_job(None, move |repo, _cx| async move {
6143 match repo {
6144 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6145 backend.diff(diff_type).await
6146 }
6147 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6148 let (proto_diff_type, merge_base_ref) = match &diff_type {
6149 DiffType::HeadToIndex => {
6150 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6151 }
6152 DiffType::HeadToWorktree => {
6153 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6154 }
6155 DiffType::MergeBase { base_ref } => (
6156 proto::git_diff::DiffType::MergeBase.into(),
6157 Some(base_ref.to_string()),
6158 ),
6159 };
6160 let response = client
6161 .request(proto::GitDiff {
6162 project_id: project_id.0,
6163 repository_id: id.to_proto(),
6164 diff_type: proto_diff_type,
6165 merge_base_ref,
6166 })
6167 .await?;
6168
6169 Ok(response.diff)
6170 }
6171 }
6172 })
6173 }
6174
    /// Creates and switches to a new branch (`git switch -c`), optionally
    /// based on `base_branch`.
    ///
    /// NOTE(review): for remote repositories, `base_branch` is not forwarded
    /// in `proto::GitCreateBranch` even though the status string shows it —
    /// the host presumably branches from its current HEAD; confirm against
    /// the proto definition.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6205
6206 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6207 let id = self.id;
6208 self.send_job(
6209 Some(format!("git switch {branch_name}").into()),
6210 move |repo, _cx| async move {
6211 match repo {
6212 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6213 backend.change_branch(branch_name).await
6214 }
6215 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6216 client
6217 .request(proto::GitChangeBranch {
6218 project_id: project_id.0,
6219 repository_id: id.to_proto(),
6220 branch_name,
6221 })
6222 .await?;
6223
6224 Ok(())
6225 }
6226 }
6227 },
6228 )
6229 }
6230
6231 pub fn delete_branch(
6232 &mut self,
6233 is_remote: bool,
6234 branch_name: String,
6235 ) -> oneshot::Receiver<Result<()>> {
6236 let id = self.id;
6237 self.send_job(
6238 Some(
6239 format!(
6240 "git branch {} {}",
6241 if is_remote { "-dr" } else { "-d" },
6242 branch_name
6243 )
6244 .into(),
6245 ),
6246 move |repo, _cx| async move {
6247 match repo {
6248 RepositoryState::Local(state) => {
6249 state.backend.delete_branch(is_remote, branch_name).await
6250 }
6251 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6252 client
6253 .request(proto::GitDeleteBranch {
6254 project_id: project_id.0,
6255 repository_id: id.to_proto(),
6256 is_remote,
6257 branch_name,
6258 })
6259 .await?;
6260
6261 Ok(())
6262 }
6263 }
6264 },
6265 )
6266 }
6267
6268 pub fn rename_branch(
6269 &mut self,
6270 branch: String,
6271 new_name: String,
6272 ) -> oneshot::Receiver<Result<()>> {
6273 let id = self.id;
6274 self.send_job(
6275 Some(format!("git branch -m {branch} {new_name}").into()),
6276 move |repo, _cx| async move {
6277 match repo {
6278 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6279 backend.rename_branch(branch, new_name).await
6280 }
6281 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6282 client
6283 .request(proto::GitRenameBranch {
6284 project_id: project_id.0,
6285 repository_id: id.to_proto(),
6286 branch,
6287 new_name,
6288 })
6289 .await?;
6290
6291 Ok(())
6292 }
6293 }
6294 },
6295 )
6296 }
6297
6298 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6299 let id = self.id;
6300 self.send_job(None, move |repo, _cx| async move {
6301 match repo {
6302 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6303 backend.check_for_pushed_commit().await
6304 }
6305 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6306 let response = client
6307 .request(proto::CheckForPushedCommits {
6308 project_id: project_id.0,
6309 repository_id: id.to_proto(),
6310 })
6311 .await?;
6312
6313 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6314
6315 Ok(branches)
6316 }
6317 }
6318 })
6319 }
6320
6321 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6322 let id = self.id;
6323 self.send_job(None, move |repo, _cx| async move {
6324 match repo {
6325 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6326 backend.checkpoint().await
6327 }
6328 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6329 let response = client
6330 .request(proto::GitCreateCheckpoint {
6331 project_id: project_id.0,
6332 repository_id: id.to_proto(),
6333 })
6334 .await?;
6335
6336 Ok(GitRepositoryCheckpoint {
6337 commit_sha: Oid::from_bytes(&response.commit_sha)?,
6338 })
6339 }
6340 }
6341 })
6342 }
6343
6344 pub fn restore_checkpoint(
6345 &mut self,
6346 checkpoint: GitRepositoryCheckpoint,
6347 ) -> oneshot::Receiver<Result<()>> {
6348 let id = self.id;
6349 self.send_job(None, move |repo, _cx| async move {
6350 match repo {
6351 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6352 backend.restore_checkpoint(checkpoint).await
6353 }
6354 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6355 client
6356 .request(proto::GitRestoreCheckpoint {
6357 project_id: project_id.0,
6358 repository_id: id.to_proto(),
6359 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
6360 })
6361 .await?;
6362 Ok(())
6363 }
6364 }
6365 })
6366 }
6367
    /// Applies a `proto::UpdateRepository` message from the upstream (host)
    /// project to this replica's snapshot, emitting a `RepositoryEvent` for
    /// each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Compare against the old branch/head before overwriting them so the
        // change event fires only on a real difference.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail proto conversion are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Fold removals and upserts into one batch edit of the status tree;
        // entries that fail to parse are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        // Only the final message of an update batch advances the scan id.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6451
6452 pub fn compare_checkpoints(
6453 &mut self,
6454 left: GitRepositoryCheckpoint,
6455 right: GitRepositoryCheckpoint,
6456 ) -> oneshot::Receiver<Result<bool>> {
6457 let id = self.id;
6458 self.send_job(None, move |repo, _cx| async move {
6459 match repo {
6460 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6461 backend.compare_checkpoints(left, right).await
6462 }
6463 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6464 let response = client
6465 .request(proto::GitCompareCheckpoints {
6466 project_id: project_id.0,
6467 repository_id: id.to_proto(),
6468 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
6469 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
6470 })
6471 .await?;
6472 Ok(response.equal)
6473 }
6474 }
6475 })
6476 }
6477
6478 pub fn diff_checkpoints(
6479 &mut self,
6480 base_checkpoint: GitRepositoryCheckpoint,
6481 target_checkpoint: GitRepositoryCheckpoint,
6482 ) -> oneshot::Receiver<Result<String>> {
6483 let id = self.id;
6484 self.send_job(None, move |repo, _cx| async move {
6485 match repo {
6486 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6487 backend
6488 .diff_checkpoints(base_checkpoint, target_checkpoint)
6489 .await
6490 }
6491 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6492 let response = client
6493 .request(proto::GitDiffCheckpoints {
6494 project_id: project_id.0,
6495 repository_id: id.to_proto(),
6496 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
6497 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
6498 })
6499 .await?;
6500 Ok(response.diff)
6501 }
6502 }
6503 })
6504 }
6505
6506 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
6507 let updated = SumTree::from_iter(
6508 self.pending_ops.iter().filter_map(|ops| {
6509 let inner_ops: Vec<PendingOp> =
6510 ops.ops.iter().filter(|op| op.running()).cloned().collect();
6511 if inner_ops.is_empty() {
6512 None
6513 } else {
6514 Some(PendingOps {
6515 repo_path: ops.repo_path.clone(),
6516 ops: inner_ops,
6517 })
6518 }
6519 }),
6520 (),
6521 );
6522
6523 if updated != self.pending_ops {
6524 cx.emit(RepositoryEvent::PendingOpsChanged {
6525 pending_ops: self.pending_ops.clone(),
6526 })
6527 }
6528
6529 self.pending_ops = updated;
6530 }
6531
    /// Schedules a full git status scan as a keyed background job.
    ///
    /// Because the job uses `GitJobKey::ReloadGitState`, a queued scan is
    /// skipped when a newer scan with the same key is already waiting (see the
    /// worker loop's key-deduplication). When `updates_tx` is provided, the
    /// freshly computed snapshot is forwarded downstream (e.g. to remote
    /// collaborators).
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been dropped while the job
                // waited in the queue; in that case there is nothing to do.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans only make sense against a local git backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                // A fresh snapshot supersedes any completed pending ops.
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6563
    /// Spawns the background worker that executes git jobs for a local
    /// repository, returning the sender used to enqueue jobs.
    ///
    /// Jobs run strictly one at a time. Before each job, the channel is
    /// drained into a local queue so keyed jobs can be deduplicated: a job is
    /// skipped when another job with the same key is already queued behind it
    /// (only the most recent keyed job runs).
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for the repository backend to finish initializing before
            // accepting any work.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently in the channel so the dedup
                // check below can see all queued jobs.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip a keyed job if a newer job with the same key is
                    // still queued — only the latest one needs to run.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed: all senders dropped, shut down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6609
    /// Spawns the background worker that executes git jobs for a remote
    /// (collab-hosted) repository, returning the sender used to enqueue jobs.
    ///
    /// Mirrors `spawn_local_git_worker`: jobs run one at a time, and a keyed
    /// job is skipped when a newer job with the same key is already queued.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain the channel so the dedup check can see every queued job.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Only the most recent job for a given key needs to run.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed: shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6645
6646 fn load_staged_text(
6647 &mut self,
6648 buffer_id: BufferId,
6649 repo_path: RepoPath,
6650 cx: &App,
6651 ) -> Task<Result<Option<String>>> {
6652 let rx = self.send_job(None, move |state, _| async move {
6653 match state {
6654 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6655 anyhow::Ok(backend.load_index_text(repo_path).await)
6656 }
6657 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6658 let response = client
6659 .request(proto::OpenUnstagedDiff {
6660 project_id: project_id.to_proto(),
6661 buffer_id: buffer_id.to_proto(),
6662 })
6663 .await?;
6664 Ok(response.staged_text)
6665 }
6666 }
6667 });
6668 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6669 }
6670
6671 fn load_committed_text(
6672 &mut self,
6673 buffer_id: BufferId,
6674 repo_path: RepoPath,
6675 cx: &App,
6676 ) -> Task<Result<DiffBasesChange>> {
6677 let rx = self.send_job(None, move |state, _| async move {
6678 match state {
6679 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6680 let committed_text = backend.load_committed_text(repo_path.clone()).await;
6681 let staged_text = backend.load_index_text(repo_path).await;
6682 let diff_bases_change = if committed_text == staged_text {
6683 DiffBasesChange::SetBoth(committed_text)
6684 } else {
6685 DiffBasesChange::SetEach {
6686 index: staged_text,
6687 head: committed_text,
6688 }
6689 };
6690 anyhow::Ok(diff_bases_change)
6691 }
6692 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6693 use proto::open_uncommitted_diff_response::Mode;
6694
6695 let response = client
6696 .request(proto::OpenUncommittedDiff {
6697 project_id: project_id.to_proto(),
6698 buffer_id: buffer_id.to_proto(),
6699 })
6700 .await?;
6701 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
6702 let bases = match mode {
6703 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
6704 Mode::IndexAndHead => DiffBasesChange::SetEach {
6705 head: response.committed_text,
6706 index: response.staged_text,
6707 },
6708 };
6709 Ok(bases)
6710 }
6711 }
6712 });
6713
6714 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6715 }
6716
6717 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6718 let repository_id = self.snapshot.id;
6719 let rx = self.send_job(None, move |state, _| async move {
6720 match state {
6721 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6722 backend.load_blob_content(oid).await
6723 }
6724 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6725 let response = client
6726 .request(proto::GetBlobContent {
6727 project_id: project_id.to_proto(),
6728 repository_id: repository_id.0,
6729 oid: oid.to_string(),
6730 })
6731 .await?;
6732 Ok(response.content)
6733 }
6734 }
6735 });
6736 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6737 }
6738
    /// Records that the given paths changed on disk and schedules a keyed job
    /// to refresh just their git statuses (rather than a full scan).
    ///
    /// Changed paths are accumulated in `paths_needing_status_update`; because
    /// the job is keyed with `GitJobKey::RefreshStatuses`, repeated calls
    /// coalesce into a single refresh that drains the accumulated set. When
    /// `updates_tx` is provided, the updated snapshot is forwarded downstream.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take a consistent snapshot and drain the accumulated paths
                // in one entity update, so concurrent callers can't race.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        // Flatten the batches of changed paths into a sorted,
                        // deduplicated set.
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        let status_task = backend.status(&changed_paths_vec);
                        // Without a HEAD commit there is nothing to diff
                        // against, so substitute an empty diff-stat result.
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        // Both `statuses.entries` and the cursor walk paths in
                        // order, so a forward-only seek suffices.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            // Skip paths whose status and diff stat are
                            // unchanged from the previous snapshot.
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Any changed path that git no longer reports a status
                        // for must be removed from the status tree (if present).
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6851
6852 /// currently running git command and when it started
6853 pub fn current_job(&self) -> Option<JobInfo> {
6854 self.active_jobs.values().next().cloned()
6855 }
6856
    /// Enqueues a no-op job and returns a receiver that resolves once all
    /// previously queued git jobs have completed.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
6860
    /// Runs `f` while tracking it as a pending git operation on each of
    /// `paths`.
    ///
    /// A `Running` pending op is registered per path before `f` starts; once
    /// `f` completes, every op is resolved to `Finished`, `Skipped` (when the
    /// error was a cancellation), or `Error`. The original error (if any) is
    /// returned to the caller after the bookkeeping is done.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // Map the job outcome to a terminal status; a `Canceled` error is
            // a skip, not a failure.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    // Look up the ops entry for this path; it may have been
                    // cleared while the job was running.
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6900
6901 fn new_pending_ops_for_paths(
6902 &mut self,
6903 paths: Vec<RepoPath>,
6904 git_status: pending_op::GitStatus,
6905 ) -> Vec<(PendingOpId, RepoPath)> {
6906 let mut edits = Vec::with_capacity(paths.len());
6907 let mut ids = Vec::with_capacity(paths.len());
6908 for path in paths {
6909 let mut ops = self
6910 .pending_ops
6911 .get(&PathKey(path.as_ref().clone()), ())
6912 .cloned()
6913 .unwrap_or_else(|| PendingOps::new(&path));
6914 let id = ops.max_id() + 1;
6915 ops.ops.push(PendingOp {
6916 id,
6917 git_status,
6918 job_status: pending_op::JobStatus::Running,
6919 });
6920 edits.push(sum_tree::Edit::Insert(ops));
6921 ids.push((id, path));
6922 }
6923 self.pending_ops.edit(edits, ());
6924 ids
6925 }
6926 pub fn default_remote_url(&self) -> Option<String> {
6927 self.remote_upstream_url
6928 .clone()
6929 .or(self.remote_origin_url.clone())
6930 }
6931}
6932
6933/// If `path` is a git linked worktree checkout, resolves it to the main
6934/// repository's working directory path. Returns `None` if `path` is a normal
6935/// repository, not a git repo, or if resolution fails.
6936///
6937/// Resolution works by:
6938/// 1. Reading the `.git` file to get the `gitdir:` pointer
6939/// 2. Following that to the worktree-specific git directory
6940/// 3. Reading the `commondir` file to find the shared `.git` directory
6941/// 4. Deriving the main repo's working directory from the common dir
6942pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
6943 let dot_git = path.join(".git");
6944 let metadata = fs.metadata(&dot_git).await.ok()??;
6945 if metadata.is_dir {
6946 return None; // Normal repo, not a linked worktree
6947 }
6948 // It's a .git file — parse the gitdir: pointer
6949 let content = fs.load(&dot_git).await.ok()?;
6950 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
6951 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
6952 // Read commondir to find the main .git directory
6953 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
6954 let common_dir = fs
6955 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
6956 .await
6957 .ok()?;
6958 Some(git::repository::original_repo_path_from_common_dir(
6959 &common_dir,
6960 ))
6961}
6962
/// Validates that the resolved worktree directory is acceptable:
/// - The setting must not be an absolute path.
/// - The resolved path must be either a subdirectory of the working
///   directory or a subdirectory of its parent (i.e., a sibling).
///
/// Returns `Ok(resolved_path)` or an error with a user-facing message.
pub fn worktrees_directory_for_repo(
    original_repo_abs_path: &Path,
    worktree_directory_setting: &str,
) -> Result<PathBuf> {
    // Check the original setting before trimming, since a path like "///"
    // is absolute but becomes "" after stripping trailing separators.
    // Also check for leading `/` or `\` explicitly, because on Windows
    // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
    // would slip through even though it's clearly not a relative path.
    if Path::new(worktree_directory_setting).is_absolute()
        || worktree_directory_setting.starts_with('/')
        || worktree_directory_setting.starts_with('\\')
    {
        anyhow::bail!(
            "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
        );
    }

    if worktree_directory_setting.is_empty() {
        anyhow::bail!("git.worktree_directory must not be empty");
    }

    // Drop trailing separators so "../name/" and "../name" behave the same.
    let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
    if trimmed == ".." {
        anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
    }

    let joined = original_repo_abs_path.join(trimmed);
    let resolved = util::normalize_path(&joined);
    // If the setting escapes the repo (e.g. starts with "../"), place the
    // worktrees under a directory named after the repo so siblings from
    // different repos don't collide.
    let resolved = if resolved.starts_with(original_repo_abs_path) {
        resolved
    } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
        resolved.join(repo_dir_name)
    } else {
        resolved
    };

    let parent = original_repo_abs_path
        .parent()
        .unwrap_or(original_repo_abs_path);

    // Final containment check: the result must live under the repo itself or
    // under the repo's parent (a sibling directory).
    if !resolved.starts_with(parent) {
        anyhow::bail!(
            "git.worktree_directory resolved to {resolved:?}, which is outside \
             the project root and its parent directory. It must resolve to a \
             subdirectory of {original_repo_abs_path:?} or a sibling of it."
        );
    }

    Ok(resolved)
}
7020
7021/// Returns a short name for a linked worktree suitable for UI display
7022///
7023/// Uses the main worktree path to come up with a short name that disambiguates
7024/// the linked worktree from the main worktree.
7025pub fn linked_worktree_short_name(
7026 main_worktree_path: &Path,
7027 linked_worktree_path: &Path,
7028) -> Option<SharedString> {
7029 if main_worktree_path == linked_worktree_path {
7030 return None;
7031 }
7032
7033 let project_name = main_worktree_path.file_name()?.to_str()?;
7034 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7035 let name = if directory_name != project_name {
7036 directory_name.to_string()
7037 } else {
7038 linked_worktree_path
7039 .parent()?
7040 .file_name()?
7041 .to_str()?
7042 .to_string()
7043 };
7044 Some(name.into())
7045}
7046
/// Builds a permalink to a file that lives inside a crate downloaded into
/// Cargo's registry `src` cache.
///
/// Published crates carry a `.cargo_vcs_info.json` recording the git SHA and
/// the crate's path inside its source repository; combined with the
/// `package.repository` field from `Cargo.toml`, this lets us link to the
/// exact upstream source for `path` at `selection`.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file to find the crate root (the directory containing
    // `.cargo_vcs_info.json`); `skip(1)` starts at the parent directory.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Translate the file's path within the crate into its path within the
    // upstream repository.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7097
7098fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7099 let Some(blame) = blame else {
7100 return proto::BlameBufferResponse {
7101 blame_response: None,
7102 };
7103 };
7104
7105 let entries = blame
7106 .entries
7107 .into_iter()
7108 .map(|entry| proto::BlameEntry {
7109 sha: entry.sha.as_bytes().into(),
7110 start_line: entry.range.start,
7111 end_line: entry.range.end,
7112 original_line_number: entry.original_line_number,
7113 author: entry.author,
7114 author_mail: entry.author_mail,
7115 author_time: entry.author_time,
7116 author_tz: entry.author_tz,
7117 committer: entry.committer_name,
7118 committer_mail: entry.committer_email,
7119 committer_time: entry.committer_time,
7120 committer_tz: entry.committer_tz,
7121 summary: entry.summary,
7122 previous: entry.previous,
7123 filename: entry.filename,
7124 })
7125 .collect::<Vec<_>>();
7126
7127 let messages = blame
7128 .messages
7129 .into_iter()
7130 .map(|(oid, message)| proto::CommitMessage {
7131 oid: oid.as_bytes().into(),
7132 message,
7133 })
7134 .collect::<Vec<_>>();
7135
7136 proto::BlameBufferResponse {
7137 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7138 }
7139}
7140
7141fn deserialize_blame_buffer_response(
7142 response: proto::BlameBufferResponse,
7143) -> Option<git::blame::Blame> {
7144 let response = response.blame_response?;
7145 let entries = response
7146 .entries
7147 .into_iter()
7148 .filter_map(|entry| {
7149 Some(git::blame::BlameEntry {
7150 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7151 range: entry.start_line..entry.end_line,
7152 original_line_number: entry.original_line_number,
7153 committer_name: entry.committer,
7154 committer_time: entry.committer_time,
7155 committer_tz: entry.committer_tz,
7156 committer_email: entry.committer_mail,
7157 author: entry.author,
7158 author_mail: entry.author_mail,
7159 author_time: entry.author_time,
7160 author_tz: entry.author_tz,
7161 summary: entry.summary,
7162 previous: entry.previous,
7163 filename: entry.filename,
7164 })
7165 })
7166 .collect::<Vec<_>>();
7167
7168 let messages = response
7169 .messages
7170 .into_iter()
7171 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7172 .collect::<HashMap<_, _>>();
7173
7174 Some(Blame { entries, messages })
7175}
7176
7177fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
7178 proto::Branch {
7179 is_head: branch.is_head,
7180 ref_name: branch.ref_name.to_string(),
7181 unix_timestamp: branch
7182 .most_recent_commit
7183 .as_ref()
7184 .map(|commit| commit.commit_timestamp as u64),
7185 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
7186 ref_name: upstream.ref_name.to_string(),
7187 tracking: upstream
7188 .tracking
7189 .status()
7190 .map(|upstream| proto::UpstreamTracking {
7191 ahead: upstream.ahead as u64,
7192 behind: upstream.behind as u64,
7193 }),
7194 }),
7195 most_recent_commit: branch
7196 .most_recent_commit
7197 .as_ref()
7198 .map(|commit| proto::CommitSummary {
7199 sha: commit.sha.to_string(),
7200 subject: commit.subject.to_string(),
7201 commit_timestamp: commit.commit_timestamp,
7202 author_name: commit.author_name.to_string(),
7203 }),
7204 }
7205}
7206
7207fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
7208 proto::Worktree {
7209 path: worktree.path.to_string_lossy().to_string(),
7210 ref_name: worktree
7211 .ref_name
7212 .as_ref()
7213 .map(|s| s.to_string())
7214 .unwrap_or_default(),
7215 sha: worktree.sha.to_string(),
7216 is_main: worktree.is_main,
7217 }
7218}
7219
7220fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
7221 git::repository::Worktree {
7222 path: PathBuf::from(proto.path.clone()),
7223 ref_name: Some(SharedString::from(&proto.ref_name)),
7224 sha: proto.sha.clone().into(),
7225 is_main: proto.is_main,
7226 }
7227}
7228
7229fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
7230 git::repository::Branch {
7231 is_head: proto.is_head,
7232 ref_name: proto.ref_name.clone().into(),
7233 upstream: proto
7234 .upstream
7235 .as_ref()
7236 .map(|upstream| git::repository::Upstream {
7237 ref_name: upstream.ref_name.to_string().into(),
7238 tracking: upstream
7239 .tracking
7240 .as_ref()
7241 .map(|tracking| {
7242 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
7243 ahead: tracking.ahead as u32,
7244 behind: tracking.behind as u32,
7245 })
7246 })
7247 .unwrap_or(git::repository::UpstreamTracking::Gone),
7248 }),
7249 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
7250 git::repository::CommitSummary {
7251 sha: commit.sha.to_string().into(),
7252 subject: commit.subject.to_string().into(),
7253 commit_timestamp: commit.commit_timestamp,
7254 author_name: commit.author_name.to_string().into(),
7255 has_parent: true,
7256 }
7257 }),
7258 }
7259}
7260
/// Converts internal [`CommitDetails`] into its protobuf form.
fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}
7270
/// Converts protobuf commit details back into [`CommitDetails`].
fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}
7280
/// This snapshot computes the repository state on the foreground thread while
/// running the git commands on the background thread. We update branch, head,
/// remotes, and worktrees first so the UI can react sooner, then compute file
/// state and emit those events immediately after.
async fn compute_snapshot(
    this: Entity<Repository>,
    backend: Arc<dyn GitRepository>,
    cx: &mut AsyncApp,
) -> Result<RepositorySnapshot> {
    let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
        // A full scan supersedes any queued incremental status refreshes.
        this.paths_needing_status_update.clear();
        (
            this.id,
            this.work_directory_abs_path.clone(),
            this.snapshot.clone(),
        )
    });

    let head_commit_future = {
        let backend = backend.clone();
        async move {
            // No HEAD SHA (e.g. an empty repository) simply yields no commit.
            Ok(match backend.head_sha().await {
                Some(head_sha) => backend.show(head_sha).await.log_err(),
                None => None,
            })
        }
    };
    // Phase 1 (background): branch list, HEAD commit, and linked worktrees.
    let (branches, head_commit, all_worktrees) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                futures::future::try_join3(
                    backend.branches(),
                    head_commit_future,
                    backend.worktrees(),
                )
                .await
            }
        })
        .await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);

    // Exclude the main checkout itself; only *linked* worktrees are kept.
    let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
        .into_iter()
        .filter(|wt| wt.path != *work_directory_abs_path)
        .collect();

    let (remote_origin_url, remote_upstream_url) = cx
        .background_spawn({
            let backend = backend.clone();
            async move {
                Ok::<_, anyhow::Error>(
                    futures::future::join(
                        backend.remote_url("origin"),
                        backend.remote_url("upstream"),
                    )
                    .await,
                )
            }
        })
        .await?;

    // Publish the fast-to-compute fields first so the UI can update before
    // the (slower) status scan below completes.
    let snapshot = this.update(cx, |this, cx| {
        let branch_changed =
            branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
        let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;

        this.snapshot = RepositorySnapshot {
            id,
            work_directory_abs_path,
            branch,
            head_commit,
            remote_origin_url,
            remote_upstream_url,
            linked_worktrees,
            scan_id: prev_snapshot.scan_id + 1,
            ..prev_snapshot
        };

        if branch_changed {
            cx.emit(RepositoryEvent::BranchChanged);
        }

        if worktrees_changed {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }

        this.snapshot.clone()
    });

    // Phase 2 (background): full file statuses, diff stats, and stash.
    let (statuses, diff_stats, stash_entries) = cx
        .background_spawn({
            let backend = backend.clone();
            let snapshot = snapshot.clone();
            async move {
                // Without a HEAD commit there is nothing to diff against, so
                // substitute an empty diff-stat result.
                let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
                    if snapshot.head_commit.is_some() {
                        backend.diff_stat(&[])
                    } else {
                        future::ready(Ok(status::GitDiffStat {
                            entries: Arc::default(),
                        }))
                        .boxed()
                    };
                futures::future::try_join3(
                    backend.status(&[RepoPath::from_rel_path(
                        &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
                    )]),
                    diff_stat_future,
                    backend.stash_entries(),
                )
                .await
            }
        })
        .await?;

    let diff_stat_map: HashMap<&RepoPath, DiffStat> =
        diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
    let mut conflicted_paths = Vec::new();
    // Build the status tree, collecting conflicted paths along the way for
    // the merge-details update below.
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
                diff_stat: diff_stat_map.get(repo_path).copied(),
            }
        }),
        (),
    );

    let merge_details = cx
        .background_spawn({
            let backend = backend.clone();
            let mut merge_details = snapshot.merge.clone();
            async move {
                let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
                Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
            }
        })
        .await?;
    let (merge_details, conflicts_changed) = merge_details;
    log::debug!("new merge details: {merge_details:?}");

    // Publish the file-state fields and emit the corresponding events.
    Ok(this.update(cx, |this, cx| {
        if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        if stash_entries != this.snapshot.stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged);
        }

        this.snapshot.scan_id += 1;
        this.snapshot.merge = merge_details;
        this.snapshot.statuses_by_path = statuses_by_path;
        this.snapshot.stash_entries = stash_entries;

        this.snapshot.clone()
    }))
}
7443
/// Decodes a git file status from the wire format.
///
/// Newer peers send the structured `status` variant; when it is absent, the
/// legacy `simple_status` integer code is decoded instead for backward
/// compatibility. Returns an error for codes that are invalid in the given
/// position.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Legacy path: no structured variant, interpret the simple code.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both sides of the merge with the same mapping.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Decode the index and worktree codes with the same mapping.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7529
7530fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7531 use proto::git_file_status::{Tracked, Unmerged, Variant};
7532
7533 let variant = match status {
7534 FileStatus::Untracked => Variant::Untracked(Default::default()),
7535 FileStatus::Ignored => Variant::Ignored(Default::default()),
7536 FileStatus::Unmerged(UnmergedStatus {
7537 first_head,
7538 second_head,
7539 }) => Variant::Unmerged(Unmerged {
7540 first_head: unmerged_status_to_proto(first_head),
7541 second_head: unmerged_status_to_proto(second_head),
7542 }),
7543 FileStatus::Tracked(TrackedStatus {
7544 index_status,
7545 worktree_status,
7546 }) => Variant::Tracked(Tracked {
7547 index_status: tracked_status_to_proto(index_status),
7548 worktree_status: tracked_status_to_proto(worktree_status),
7549 }),
7550 };
7551 proto::GitFileStatus {
7552 variant: Some(variant),
7553 }
7554}
7555
7556fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7557 match code {
7558 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7559 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7560 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7561 }
7562}
7563
7564fn tracked_status_to_proto(code: StatusCode) -> i32 {
7565 match code {
7566 StatusCode::Added => proto::GitStatus::Added as _,
7567 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7568 StatusCode::Modified => proto::GitStatus::Modified as _,
7569 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7570 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7571 StatusCode::Copied => proto::GitStatus::Copied as _,
7572 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7573 }
7574}