1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 project_settings::ProjectSettings,
10 trusted_worktrees::{
11 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
12 },
13 worktree_store::{WorktreeStore, WorktreeStoreEvent},
14};
15use anyhow::{Context as _, Result, anyhow, bail};
16use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
17use buffer_diff::{BufferDiff, BufferDiffEvent};
18use client::ProjectId;
19use collections::HashMap;
20pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
21use fs::{Fs, RemoveOptions};
22use futures::{
23 FutureExt, StreamExt,
24 channel::{
25 mpsc,
26 oneshot::{self, Canceled},
27 },
28 future::{self, BoxFuture, Shared},
29 stream::{FuturesOrdered, FuturesUnordered},
30};
31use git::{
32 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
33 blame::Blame,
34 parse_git_remote_url,
35 repository::{
36 Branch, CommitData, CommitDetails, CommitDiff, CommitFile, CommitOptions,
37 CreateWorktreeTarget, DiffType, FetchOptions, GitCommitTemplate, GitRepository,
38 GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote,
39 RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus,
40 Worktree as GitWorktree,
41 },
42 stash::{GitStash, StashEntry},
43 status::{
44 self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff,
45 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
46 },
47};
48use gpui::{
49 App, AppContext, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, SharedString,
50 Subscription, Task, WeakEntity,
51};
52use language::{
53 Buffer, BufferEvent, Language, LanguageRegistry,
54 proto::{deserialize_version, serialize_version},
55};
56use parking_lot::Mutex;
57use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
58use postage::stream::Stream as _;
59use rpc::{
60 AnyProtoClient, TypedEnvelope,
61 proto::{self, git_reset, split_repository_update},
62};
63use serde::Deserialize;
64use settings::{Settings, WorktreeId};
65use smallvec::SmallVec;
66use smol::future::yield_now;
67use std::{
68 cmp::Ordering,
69 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
70 future::Future,
71 mem,
72 ops::Range,
73 path::{Path, PathBuf},
74 str::FromStr,
75 sync::{
76 Arc,
77 atomic::{self, AtomicU64},
78 },
79 time::{Duration, Instant},
80};
81use sum_tree::{Edit, SumTree, TreeMap};
82use task::Shell;
83use text::{Bias, BufferId};
84use util::{
85 ResultExt, debug_panic,
86 paths::{PathStyle, SanitizedPath},
87 post_inc,
88 rel_path::RelPath,
89};
90use worktree::{
91 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
92 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
93};
94use zeroize::Zeroize;
95
/// Central store for all git state in a project: tracks every discovered
/// repository, per-buffer diff state, and (when shared) the plumbing that
/// replicates repository state to downstream peers.
pub struct GitStore {
    /// Local vs. remote (collaboration) backing state.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// Every repository known to this store, keyed by id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// Worktrees associated with each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository currently considered active, if any.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff loads, deduplicated per (buffer, diff kind) so that
    /// concurrent callers share a single task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diff bases, conflict sets, ...).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diff entities retained on behalf of remote peers, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
110
/// Strong handles to the diffs shared with a particular peer for one buffer,
/// keeping those diff entities alive while the peer holds them open.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}

/// Per-buffer git state: the diff entities derived from a buffer and the
/// base texts those diffs are computed against.
struct BufferGitState {
    /// Diff of the buffer against the index (staged) text.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    /// Diff of the buffer against the committed (HEAD) text.
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs of the buffer against arbitrary commits, keyed by OID
    /// (`None` is used for a diff with no base content).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    /// Merge-conflict regions parsed from this buffer, if any.
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders resolved when conflict data is next updated.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    /// Broadcasts whether a diff recalculation is currently in flight.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    /// Cached HEAD text for this buffer's file.
    head_text: Option<Arc<str>>,
    /// Cached index text for this buffer's file.
    index_text: Option<Arc<str>>,
    /// Cached blob contents for commit-relative diffs, keyed by OID.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
147
/// Describes which diff base texts (index and/or HEAD) changed for a buffer.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to distinct values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}

/// The flavor of diff being loaded or cached for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer vs. the index text.
    Unstaged,
    /// Buffer vs. the committed (HEAD) text.
    Uncommitted,
    /// Buffer vs. an arbitrary commit (`None` means no base content).
    SinceOid(Option<git::Oid>),
}
165
/// Backing state distinguishing a locally-hosted store from one that mirrors
/// a remote (collaboration) host.
enum GitStoreState {
    Local {
        /// Source of fresh ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is shared with downstream peers.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this replica is itself re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// A unit of work queued for the background task that relays repository
/// changes to downstream peers.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// Connection to downstream peers of a locally-hosted project, plus the
/// channel and background task that forward repository updates to them.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    /// Drives the receiving end of `updates_tx`; cancelled when dropped.
    _task: Task<Result<()>>,
}

/// A checkpoint of repositories in the store, keyed by working-directory
/// path.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
196
/// A single file's git status, as stored in a repository snapshot's status
/// tree.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
    /// Added/deleted line counts, when known.
    pub diff_stat: Option<DiffStat>,
}
203
204impl StatusEntry {
205 fn to_proto(&self) -> proto::StatusEntry {
206 let simple_status = match self.status {
207 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
208 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
209 FileStatus::Tracked(TrackedStatus {
210 index_status,
211 worktree_status,
212 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
213 worktree_status
214 } else {
215 index_status
216 }),
217 };
218
219 proto::StatusEntry {
220 repo_path: self.repo_path.to_proto(),
221 simple_status,
222 status: Some(status_to_proto(self.status)),
223 diff_stat_added: self.diff_stat.map(|ds| ds.added),
224 diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted),
225 }
226 }
227}
228
229impl TryFrom<proto::StatusEntry> for StatusEntry {
230 type Error = anyhow::Error;
231
232 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
233 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
234 let status = status_from_proto(value.simple_status, value.status)?;
235 let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) {
236 (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }),
237 _ => None,
238 };
239 Ok(Self {
240 repo_path,
241 status,
242 diff_stat,
243 })
244 }
245}
246
/// Allows `StatusEntry` values to live in a `SumTree`, summarized by path
/// plus aggregated git status.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

/// Entries are keyed (and therefore ordered) by repository-relative path.
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
265
/// Store-local identifier for a repository.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// State of an in-progress merge: the conflicted paths (with the merge heads
/// recorded for each) and the merge message, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    pub message: Option<SharedString>,
}

/// Commit data that is either still being fetched or already available.
#[derive(Clone)]
pub enum CommitDataState {
    /// A fetch is in flight; the receiver resolves with the data when done.
    Loading(Option<Shared<oneshot::Receiver<Arc<CommitData>>>>),
    Loaded(Arc<CommitData>),
}
280
/// An immutable view of a repository's state as of a particular scan.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Git status entries, ordered by repository-relative path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    /// The currently checked-out branch, if any.
    pub branch: Option<Branch>,
    pub branch_list: Arc<[Branch]>,
    pub head_commit: Option<CommitDetails>,
    /// Identifier of the scan that produced this snapshot.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
    /// Linked `git worktree` checkouts of this repository.
    pub linked_worktrees: Arc<[GitWorktree]>,
}

/// Identifier for a queued git job.
type JobId = u64;

/// Metadata for an active git job: when it started and a human-readable
/// description.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
310
/// Handle to the background machinery that fetches commit data on demand.
struct CommitDataHandler {
    _task: Task<()>,
    /// Channel used to request data for a specific commit.
    commit_data_request: smol::channel::Sender<Oid>,
    /// Callers waiting on each requested commit's data.
    completion_senders: HashMap<Oid, oneshot::Sender<Arc<CommitData>>>,
    /// Commits that have been requested but not yet resolved.
    pending_requests: HashSet<Oid>,
}

/// Represents the handler of a git cat-file --batch process within Zed
/// It's used to lazily fetch commit data as needed (whatever a user is viewing)
enum CommitDataHandlerState {
    /// The handler is open and processing requests
    Open(CommitDataHandler),
    /// The handler closed because it didn't receive any requests in the last 10s
    /// or hasn't been open before
    Closed,
}

/// Next step for the commit-data request loop.
enum NextCommitDataRequest {
    /// A request is in flight; await its response.
    Request(BoxFuture<'static, Result<proto::GetCommitDataResponse>>),
    /// Nothing to do right now.
    Idle,
    /// The request stream has ended.
    Closed,
}

/// Incrementally loaded commit-graph data for one (source, order) log view.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    /// Error encountered while loading, if any.
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index of each commit within `commit_data`, by OID.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of the loaded graph data returned to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
346
/// Live handle to a single git repository tracked by the [`GitStore`].
pub struct Repository {
    this: WeakEntity<Self>,
    /// The latest snapshot of the repository's state; also reachable through
    /// this type's `Deref` impl.
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue of git jobs to run against this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    /// Jobs currently executing, keyed by id.
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    /// Source of the next job id.
    job_id: JobId,
    /// Askpass delegates for in-flight credential prompts, keyed by id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    commit_data_handler: CommitDataHandlerState,
    /// Lazily fetched per-commit data cache.
    commit_data: HashMap<Oid, CommitDataState>,
}

/// A `Repository` reads as its most recent [`RepositorySnapshot`].
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
374
/// Handle to a repository that lives on the local filesystem.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    /// Backend used to execute git operations for this repository.
    pub backend: Arc<dyn GitRepository>,
    /// Shell environment captured for the repository's working directory.
    pub environment: Arc<HashMap<String, String>>,
}
381
impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`, resolving the shell
    /// environment (and the `git` binary) from the project's working
    /// directory.
    ///
    /// Falls back to an empty environment if one can't be obtained, and to
    /// whichever `git` is on the process-wide search path if the project
    /// environment's `PATH` doesn't provide one.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        // Prefer the `git` reachable via the project environment's PATH.
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
424
/// Handle to a repository hosted by a remote collaborator.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// Where a repository's git operations actually execute.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events for incremental commit-graph loading.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of loaded commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}

/// Events emitted by a [`Repository`] as parts of its state change.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    HeadChanged,
    BranchListChanged,
    StashEntriesChanged,
    GitWorktreeListChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}
454
/// Marker event: the set of active jobs on a repository changed.
#[derive(Clone, Debug)]
pub struct JobsUpdated;

/// Events emitted by the [`GitStore`] itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    /// A write to the git index failed.
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of git work, executed with access to the repository's state.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): appears to identify jobs of the same class so queued
    // duplicates can be coalesced — confirm against the job scheduler.
    key: Option<GitJobKey>,
}

/// Classes of git jobs that can be keyed by [`GitJob::key`].
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
486
487impl GitStore {
488 pub fn local(
489 worktree_store: &Entity<WorktreeStore>,
490 buffer_store: Entity<BufferStore>,
491 environment: Entity<ProjectEnvironment>,
492 fs: Arc<dyn Fs>,
493 cx: &mut Context<Self>,
494 ) -> Self {
495 Self::new(
496 worktree_store.clone(),
497 buffer_store,
498 GitStoreState::Local {
499 next_repository_id: Arc::new(AtomicU64::new(1)),
500 downstream: None,
501 project_environment: environment,
502 fs,
503 },
504 cx,
505 )
506 }
507
508 pub fn remote(
509 worktree_store: &Entity<WorktreeStore>,
510 buffer_store: Entity<BufferStore>,
511 upstream_client: AnyProtoClient,
512 project_id: u64,
513 cx: &mut Context<Self>,
514 ) -> Self {
515 Self::new(
516 worktree_store.clone(),
517 buffer_store,
518 GitStoreState::Remote {
519 upstream_client,
520 upstream_project_id: project_id,
521 downstream: None,
522 },
523 cx,
524 )
525 }
526
527 fn new(
528 worktree_store: Entity<WorktreeStore>,
529 buffer_store: Entity<BufferStore>,
530 state: GitStoreState,
531 cx: &mut Context<Self>,
532 ) -> Self {
533 let mut _subscriptions = vec![
534 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
535 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
536 ];
537
538 if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
539 _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
540 }
541
542 GitStore {
543 state,
544 buffer_store,
545 worktree_store,
546 repositories: HashMap::default(),
547 worktree_ids: HashMap::default(),
548 active_repo_id: None,
549 _subscriptions,
550 loading_diffs: HashMap::default(),
551 shared_diffs: HashMap::default(),
552 diffs: HashMap::default(),
553 }
554 }
555
    /// Registers all RPC handlers for git operations on the given client, so
    /// that remote peers can drive this store over the wire.
    pub fn init(client: &AnyProtoClient) {
        // Branches and remotes.
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        // Sync with remotes.
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        // Staging, stashing, and committing.
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        // Checkpoints.
        client.add_entity_request_handler(Self::handle_create_checkpoint);
        client.add_entity_request_handler(Self::handle_create_archive_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_checkpoint);
        client.add_entity_request_handler(Self::handle_restore_archive_checkpoint);
        client.add_entity_request_handler(Self::handle_compare_checkpoints);
        client.add_entity_request_handler(Self::handle_diff_checkpoints);
        // Diffs, blame, and history.
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        // Repository lifecycle and git worktrees.
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
        client.add_entity_request_handler(Self::handle_remove_worktree);
        client.add_entity_request_handler(Self::handle_rename_worktree);
        client.add_entity_request_handler(Self::handle_get_head_sha);
        client.add_entity_request_handler(Self::handle_edit_ref);
        client.add_entity_request_handler(Self::handle_repair_worktrees);
        client.add_entity_request_handler(Self::handle_get_commit_data);
    }
613
614 pub fn is_local(&self) -> bool {
615 matches!(self.state, GitStoreState::Local { .. })
616 }
617
618 fn set_active_repo_id(&mut self, repo_id: RepositoryId, cx: &mut Context<Self>) {
619 if self.active_repo_id != Some(repo_id) {
620 self.active_repo_id = Some(repo_id);
621 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
622 }
623 }
624
625 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
626 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
627 self.set_active_repo_id(repo.read(cx).id, cx);
628 }
629 }
630
631 pub fn set_active_repo_for_worktree(
632 &mut self,
633 worktree_id: WorktreeId,
634 cx: &mut Context<Self>,
635 ) {
636 let Some(worktree) = self
637 .worktree_store
638 .read(cx)
639 .worktree_for_id(worktree_id, cx)
640 else {
641 return;
642 };
643 let worktree_abs_path = worktree.read(cx).abs_path();
644 let Some(repo_id) = self
645 .repositories
646 .values()
647 .filter(|repo| {
648 let repo_path = &repo.read(cx).work_directory_abs_path;
649 *repo_path == worktree_abs_path || worktree_abs_path.starts_with(repo_path.as_ref())
650 })
651 .max_by_key(|repo| repo.read(cx).work_directory_abs_path.as_os_str().len())
652 .map(|repo| repo.read(cx).id)
653 else {
654 return;
655 };
656
657 self.set_active_repo_id(repo_id, cx);
658 }
659
    /// Begins sharing this store downstream: sends an initial update for
    /// every repository to `client` and records it as the downstream peer.
    ///
    /// For a locally-hosted store this also spawns a background relay task
    /// that diffs successive repository snapshots and forwards incremental
    /// updates until the channel closes or a send fails.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // A re-shared remote store just forwards full initial updates
                // and remembers the client.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // Seed the relay channel with the current snapshot of every
                // repository; later snapshots arrive via `updates_tx`.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        // Known repository: send a delta
                                        // against the previous snapshot.
                                        // Unknown: send a full initial update.
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // Relay ended (channel closed or send error): clear
                        // the downstream so the store stops queueing updates.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
740
741 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
742 match &mut self.state {
743 GitStoreState::Local {
744 downstream: downstream_client,
745 ..
746 } => {
747 downstream_client.take();
748 }
749 GitStoreState::Remote {
750 downstream: downstream_client,
751 ..
752 } => {
753 downstream_client.take();
754 }
755 }
756 self.shared_diffs.clear();
757 }
758
    /// Discards all diff handles retained on behalf of the given peer.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
762
763 pub fn active_repository(&self) -> Option<Entity<Repository>> {
764 self.active_repo_id
765 .as_ref()
766 .map(|id| self.repositories[id].clone())
767 }
768
    /// Returns the diff of `buffer` against its index (staged) text,
    /// creating it on first use.
    ///
    /// Reuses an existing diff entity when one is alive (waiting for any
    /// in-flight recalculation first), and deduplicates concurrent loads via
    /// the `loading_diffs` map. Fails if no repository contains the buffer.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: a live diff already exists for this buffer.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Share a single load task among concurrent callers.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    // `Shared` tasks need a cloneable error type.
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
823
    /// Returns the diff of `buffer` against the blob at the given commit
    /// (`None` yields a diff with no base content), creating and caching the
    /// diff entity on first use.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a diff for this OID already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // Another caller may already be loading this diff; share its task.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository, unless there is
                    // no base commit.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // The unstaged diff serves as this diff's secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    // Register the new diff (and its base text) so subsequent
                    // calls hit the caches above.
                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                // `Shared` tasks need a cloneable error type.
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
924
    /// Returns the diff of `buffer` against its committed (HEAD) text,
    /// creating it on first use.
    ///
    /// Mirrors [`Self::open_unstaged_diff`]: reuses a live diff entity when
    /// possible and deduplicates concurrent loads via `loading_diffs`.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff already exists for this buffer.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Share a single load task among concurrent callers.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
977
    /// Shared tail of [`Self::open_unstaged_diff`] and
    /// [`Self::open_uncommitted_diff`]: installs the freshly loaded base
    /// texts into the buffer's git state, creates the diff entity, and waits
    /// for the initial recalculation before returning it.
    ///
    /// On a base-text load error, clears the `loading_diffs` entry so a later
    /// call can retry, then propagates the error.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => {
                        diff_state.unstaged_diff.get_or_insert(diff.downgrade());
                    }
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                // Kick off recalculation against the new bases, then hand the
                // caller a future that resolves once it has completed.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
1054
1055 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1056 let diff_state = self.diffs.get(&buffer_id)?;
1057 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1058 }
1059
1060 pub fn get_uncommitted_diff(
1061 &self,
1062 buffer_id: BufferId,
1063 cx: &App,
1064 ) -> Option<Entity<BufferDiff>> {
1065 let diff_state = self.diffs.get(&buffer_id)?;
1066 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1067 }
1068
1069 pub fn get_diff_since_oid(
1070 &self,
1071 buffer_id: BufferId,
1072 oid: Option<git::Oid>,
1073 cx: &App,
1074 ) -> Option<Entity<BufferDiff>> {
1075 let diff_state = self.diffs.get(&buffer_id)?;
1076 diff_state.read(cx).oid_diff(oid)
1077 }
1078
1079 pub fn open_conflict_set(
1080 &mut self,
1081 buffer: Entity<Buffer>,
1082 cx: &mut Context<Self>,
1083 ) -> Entity<ConflictSet> {
1084 log::debug!("open conflict set");
1085 let buffer_id = buffer.read(cx).remote_id();
1086
1087 if let Some(git_state) = self.diffs.get(&buffer_id)
1088 && let Some(conflict_set) = git_state
1089 .read(cx)
1090 .conflict_set
1091 .as_ref()
1092 .and_then(|weak| weak.upgrade())
1093 {
1094 let conflict_set = conflict_set;
1095 let buffer_snapshot = buffer.read(cx).text_snapshot();
1096
1097 git_state.update(cx, |state, cx| {
1098 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1099 });
1100
1101 return conflict_set;
1102 }
1103
1104 let is_unmerged = self
1105 .repository_and_path_for_buffer_id(buffer_id, cx)
1106 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1107 let git_store = cx.weak_entity();
1108 let buffer_git_state = self
1109 .diffs
1110 .entry(buffer_id)
1111 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1112 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1113
1114 self._subscriptions
1115 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1116 cx.emit(GitStoreEvent::ConflictsUpdated);
1117 }));
1118
1119 buffer_git_state.update(cx, |state, cx| {
1120 state.conflict_set = Some(conflict_set.downgrade());
1121 let buffer_snapshot = buffer.read(cx).text_snapshot();
1122 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1123 });
1124
1125 conflict_set
1126 }
1127
1128 pub fn project_path_git_status(
1129 &self,
1130 project_path: &ProjectPath,
1131 cx: &App,
1132 ) -> Option<FileStatus> {
1133 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1134 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1135 }
1136
1137 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
1138 let mut work_directory_abs_paths = Vec::new();
1139 let mut checkpoints = Vec::new();
1140 for repository in self.repositories.values() {
1141 repository.update(cx, |repository, _| {
1142 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
1143 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
1144 });
1145 }
1146
1147 cx.background_executor().spawn(async move {
1148 let checkpoints = future::try_join_all(checkpoints).await?;
1149 Ok(GitStoreCheckpoint {
1150 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
1151 .into_iter()
1152 .zip(checkpoints)
1153 .collect(),
1154 })
1155 })
1156 }
1157
1158 pub fn restore_checkpoint(
1159 &self,
1160 checkpoint: GitStoreCheckpoint,
1161 cx: &mut App,
1162 ) -> Task<Result<()>> {
1163 let repositories_by_work_dir_abs_path = self
1164 .repositories
1165 .values()
1166 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1167 .collect::<HashMap<_, _>>();
1168
1169 let mut tasks = Vec::new();
1170 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
1171 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
1172 let restore = repository.update(cx, |repository, _| {
1173 repository.restore_checkpoint(checkpoint)
1174 });
1175 tasks.push(async move { restore.await? });
1176 }
1177 }
1178 cx.background_spawn(async move {
1179 future::try_join_all(tasks).await?;
1180 Ok(())
1181 })
1182 }
1183
1184 /// Compares two checkpoints, returning true if they are equal.
1185 pub fn compare_checkpoints(
1186 &self,
1187 left: GitStoreCheckpoint,
1188 mut right: GitStoreCheckpoint,
1189 cx: &mut App,
1190 ) -> Task<Result<bool>> {
1191 let repositories_by_work_dir_abs_path = self
1192 .repositories
1193 .values()
1194 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1195 .collect::<HashMap<_, _>>();
1196
1197 let mut tasks = Vec::new();
1198 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1199 if let Some(right_checkpoint) = right
1200 .checkpoints_by_work_dir_abs_path
1201 .remove(&work_dir_abs_path)
1202 {
1203 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1204 {
1205 let compare = repository.update(cx, |repository, _| {
1206 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1207 });
1208
1209 tasks.push(async move { compare.await? });
1210 }
1211 } else {
1212 return Task::ready(Ok(false));
1213 }
1214 }
1215 cx.background_spawn(async move {
1216 Ok(future::try_join_all(tasks)
1217 .await?
1218 .into_iter()
1219 .all(|result| result))
1220 })
1221 }
1222
    /// Blames a buffer, optionally at a specific historical `version`.
    ///
    /// For local repositories the blame is computed by the git backend
    /// against the buffer's (possibly versioned) contents; for remote
    /// repositories the request is forwarded upstream over RPC. Returns an
    /// error if the buffer does not belong to any known git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the content to blame before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Ask the remote host to compute the blame for us.
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1269
1270 pub fn file_history(
1271 &self,
1272 repo: &Entity<Repository>,
1273 path: RepoPath,
1274 cx: &mut App,
1275 ) -> Task<Result<git::repository::FileHistory>> {
1276 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1277
1278 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1279 }
1280
1281 pub fn file_history_paginated(
1282 &self,
1283 repo: &Entity<Repository>,
1284 path: RepoPath,
1285 skip: usize,
1286 limit: Option<usize>,
1287 cx: &mut App,
1288 ) -> Task<Result<git::repository::FileHistory>> {
1289 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1290
1291 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1292 }
1293
    /// Builds a permalink URL to the given line range of a buffer, using the
    /// buffer's git remote, or crate metadata when the file lives in the
    /// Cargo registry sources.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the remote that the current branch's upstream points at,
        // falling back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        // Resolve the remote URL and current HEAD, then let
                        // the matching hosting provider format the link.
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Delegate permalink construction to the remote host.
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1378
1379 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1380 match &self.state {
1381 GitStoreState::Local {
1382 downstream: downstream_client,
1383 ..
1384 } => downstream_client
1385 .as_ref()
1386 .map(|state| (state.client.clone(), state.project_id)),
1387 GitStoreState::Remote {
1388 downstream: downstream_client,
1389 ..
1390 } => downstream_client.clone(),
1391 }
1392 }
1393
1394 fn upstream_client(&self) -> Option<AnyProtoClient> {
1395 match &self.state {
1396 GitStoreState::Local { .. } => None,
1397 GitStoreState::Remote {
1398 upstream_client, ..
1399 } => Some(upstream_client.clone()),
1400 }
1401 }
1402
    /// Reacts to worktree changes: notifies repositories of changed paths,
    /// registers/updates repositories discovered by a worktree, and drops
    /// repositories whose last referencing worktree was removed.
    ///
    /// Only meaningful for local projects; remote stores receive repository
    /// updates over RPC instead.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the updated entries by owning repository, then
                    // notify each repository of its changed paths.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible (e.g. single-file) worktrees don't contribute
                // repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Collect repositories that are no longer referenced by any
                // worktree after removing this one.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream collaborators the repository is gone.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // Pick a replacement active repository, if any remain.
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// When a repository's state changes, refreshes conflict tracking for
    /// every buffer belonging to that repository, then re-emits the event at
    /// the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Sync the conflict flag with the repository's
                        // unmerged-path set; reparse markers if it flipped.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1548
    /// Re-emits a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1552
    /// Updates our list of repositories and schedules git scans in response
    /// to a notification from a worktree.
    ///
    /// Updates matching an existing repository (by old or new work directory)
    /// trigger a rescan, or removal once no worktree references the
    /// repository anymore. Updates for brand-new work directories create a
    /// fresh local [`Repository`] entity.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match the update against a known repository by either its old
            // or its new work directory path.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // The repository still exists: record its association
                    // with this worktree and rescan at the new work directory.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // The work directory is gone from this worktree; schedule
                    // removal once no worktree references the repository.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // A brand-new repository: create a local Repository entity,
                // subscribe to its events, and kick off its first scan.
                let original_repo_abs_path: Arc<Path> = git::repository::original_repo_path(
                    work_directory_abs_path,
                    common_dir_abs_path,
                    repository_dir_abs_path,
                )
                .into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository discovered becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            // Tell downstream collaborators the repository is gone.
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1665
1666 fn on_trusted_worktrees_event(
1667 &mut self,
1668 _: Entity<TrustedWorktreesStore>,
1669 event: &TrustedWorktreesEvent,
1670 cx: &mut Context<Self>,
1671 ) {
1672 if !matches!(self.state, GitStoreState::Local { .. }) {
1673 return;
1674 }
1675
1676 let (is_trusted, event_paths) = match event {
1677 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1678 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1679 };
1680
1681 for (repo_id, worktree_ids) in &self.worktree_ids {
1682 if worktree_ids
1683 .iter()
1684 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1685 {
1686 if let Some(repo) = self.repositories.get(repo_id) {
1687 let repository_state = repo.read(cx).repository_state.clone();
1688 cx.background_spawn(async move {
1689 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1690 state.backend.set_trusted(is_trusted);
1691 }
1692 })
1693 .detach();
1694 }
1695 }
1696 }
1697 }
1698
    /// Keeps per-buffer git state in sync with the buffer store: attaches
    /// language-change listeners to new buffers, drops diff state for closed
    /// buffers, and reloads diff bases when a buffer's file path changes.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Re-style existing diffs when the buffer's language changes.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // Release our diff state and any shared copies.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    cx.spawn(async move |_git_store, cx| {
                        async {
                            // Reload the committed text at the new path and
                            // push it into the buffer's diff state.
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1771
    /// Kicks off diff recalculation and conflict-marker reparsing for the
    /// given buffers, returning a future that resolves once all of the
    /// resulting work has completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // Only wait when a recalculation was actually scheduled.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1797
    /// Responds to hunks being staged or unstaged in a diff by writing the
    /// new index text back to the repository's git index, clearing pending
    /// hunk state and emitting an error event if the write fails.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Number each staging operation so stale index writes can be
                // recognized.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // On failure, roll back the pending hunk state and
                        // surface the error to subscribers.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1837
1838 fn local_worktree_git_repos_changed(
1839 &mut self,
1840 worktree: Entity<Worktree>,
1841 changed_repos: &UpdatedGitRepositoriesSet,
1842 cx: &mut Context<Self>,
1843 ) {
1844 log::debug!("local worktree repos changed");
1845 debug_assert!(worktree.read(cx).is_local());
1846
1847 for repository in self.repositories.values() {
1848 repository.update(cx, |repository, cx| {
1849 let repo_abs_path = &repository.work_directory_abs_path;
1850 if changed_repos.iter().any(|update| {
1851 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1852 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1853 }) {
1854 repository.reload_buffer_diff_bases(cx);
1855 }
1856 });
1857 }
1858 }
1859
    /// All tracked repositories, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1863
1864 /// Returns the original (main) repository working directory for the given worktree.
1865 /// For normal checkouts this equals the worktree's own path; for linked
1866 /// worktrees it points back to the original repo.
1867 pub fn original_repo_path_for_worktree(
1868 &self,
1869 worktree_id: WorktreeId,
1870 cx: &App,
1871 ) -> Option<Arc<Path>> {
1872 self.active_repo_id
1873 .iter()
1874 .chain(self.worktree_ids.keys())
1875 .find(|repo_id| {
1876 self.worktree_ids
1877 .get(repo_id)
1878 .is_some_and(|ids| ids.contains(&worktree_id))
1879 })
1880 .and_then(|repo_id| self.repositories.get(repo_id))
1881 .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
1882 }
1883
1884 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1885 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1886 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1887 Some(status.status)
1888 }
1889
1890 pub fn repository_and_path_for_buffer_id(
1891 &self,
1892 buffer_id: BufferId,
1893 cx: &App,
1894 ) -> Option<(Entity<Repository>, RepoPath)> {
1895 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1896 let project_path = buffer.read(cx).project_path(cx)?;
1897 self.repository_and_path_for_project_path(&project_path, cx)
1898 }
1899
1900 pub fn repository_and_path_for_project_path(
1901 &self,
1902 path: &ProjectPath,
1903 cx: &App,
1904 ) -> Option<(Entity<Repository>, RepoPath)> {
1905 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1906 self.repositories
1907 .values()
1908 .filter_map(|repo| {
1909 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1910 Some((repo.clone(), repo_path))
1911 })
1912 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1913 }
1914
1915 pub fn git_init(
1916 &self,
1917 path: Arc<Path>,
1918 fallback_branch_name: String,
1919 cx: &App,
1920 ) -> Task<Result<()>> {
1921 match &self.state {
1922 GitStoreState::Local { fs, .. } => {
1923 let fs = fs.clone();
1924 cx.background_executor()
1925 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1926 }
1927 GitStoreState::Remote {
1928 upstream_client,
1929 upstream_project_id: project_id,
1930 ..
1931 } => {
1932 let client = upstream_client.clone();
1933 let project_id = *project_id;
1934 cx.background_executor().spawn(async move {
1935 client
1936 .request(proto::GitInit {
1937 project_id: project_id,
1938 abs_path: path.to_string_lossy().into_owned(),
1939 fallback_branch_name,
1940 })
1941 .await?;
1942 Ok(())
1943 })
1944 }
1945 }
1946 }
1947
1948 pub fn git_clone(
1949 &self,
1950 repo: String,
1951 path: impl Into<Arc<std::path::Path>>,
1952 cx: &App,
1953 ) -> Task<Result<()>> {
1954 let path = path.into();
1955 match &self.state {
1956 GitStoreState::Local { fs, .. } => {
1957 let fs = fs.clone();
1958 cx.background_executor()
1959 .spawn(async move { fs.git_clone(&repo, &path).await })
1960 }
1961 GitStoreState::Remote {
1962 upstream_client,
1963 upstream_project_id,
1964 ..
1965 } => {
1966 if upstream_client.is_via_collab() {
1967 return Task::ready(Err(anyhow!(
1968 "Git Clone isn't supported for project guests"
1969 )));
1970 }
1971 let request = upstream_client.request(proto::GitClone {
1972 project_id: *upstream_project_id,
1973 abs_path: path.to_string_lossy().into_owned(),
1974 remote_repo: repo,
1975 });
1976
1977 cx.background_spawn(async move {
1978 let result = request.await?;
1979
1980 match result.success {
1981 true => Ok(()),
1982 false => Err(anyhow!("Git Clone failed")),
1983 }
1984 })
1985 }
1986 }
1987 }
1988
    /// Applies an `UpdateRepository` message from upstream, creating the
    /// remote repository entity on first sight and re-forwarding the update
    /// to any downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Create the remote repository entity on first sight of this id.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Forward the (re-addressed) update to downstream collaborators.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
2044
    /// Host-side handler for the `RemoveRepository` message: drops the
    /// repository from the local map, relays the removal to any downstream
    /// client, clears the active repository if it was the one removed, and
    /// notifies observers.
    async fn handle_remove_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;
            let id = RepositoryId::from_proto(update.id);
            this.repositories.remove(&id);
            // Forward the removal downstream, rewriting the project id for
            // that connection before sending.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            // If the removed repository was active, clear the selection and
            // announce the change before the generic removal event.
            if this.active_repo_id == Some(id) {
                this.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            cx.emit(GitStoreEvent::RepositoryRemoved(id));
        });
        Ok(())
    }
2066
2067 async fn handle_git_init(
2068 this: Entity<Self>,
2069 envelope: TypedEnvelope<proto::GitInit>,
2070 cx: AsyncApp,
2071 ) -> Result<proto::Ack> {
2072 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2073 let name = envelope.payload.fallback_branch_name;
2074 cx.update(|cx| this.read(cx).git_init(path, name, cx))
2075 .await?;
2076
2077 Ok(proto::Ack {})
2078 }
2079
2080 async fn handle_git_clone(
2081 this: Entity<Self>,
2082 envelope: TypedEnvelope<proto::GitClone>,
2083 cx: AsyncApp,
2084 ) -> Result<proto::GitCloneResponse> {
2085 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
2086 let repo_name = envelope.payload.remote_repo;
2087 let result = cx
2088 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
2089 .await;
2090
2091 Ok(proto::GitCloneResponse {
2092 success: result.is_ok(),
2093 })
2094 }
2095
2096 async fn handle_fetch(
2097 this: Entity<Self>,
2098 envelope: TypedEnvelope<proto::Fetch>,
2099 mut cx: AsyncApp,
2100 ) -> Result<proto::RemoteMessageResponse> {
2101 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2102 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2103 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2104 let askpass_id = envelope.payload.askpass_id;
2105
2106 let askpass = make_remote_delegate(
2107 this,
2108 envelope.payload.project_id,
2109 repository_id,
2110 askpass_id,
2111 &mut cx,
2112 );
2113
2114 let remote_output = repository_handle
2115 .update(&mut cx, |repository_handle, cx| {
2116 repository_handle.fetch(fetch_options, askpass, cx)
2117 })
2118 .await??;
2119
2120 Ok(proto::RemoteMessageResponse {
2121 stdout: remote_output.stdout,
2122 stderr: remote_output.stderr,
2123 })
2124 }
2125
2126 async fn handle_push(
2127 this: Entity<Self>,
2128 envelope: TypedEnvelope<proto::Push>,
2129 mut cx: AsyncApp,
2130 ) -> Result<proto::RemoteMessageResponse> {
2131 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2132 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2133
2134 let askpass_id = envelope.payload.askpass_id;
2135 let askpass = make_remote_delegate(
2136 this,
2137 envelope.payload.project_id,
2138 repository_id,
2139 askpass_id,
2140 &mut cx,
2141 );
2142
2143 let options = envelope
2144 .payload
2145 .options
2146 .as_ref()
2147 .map(|_| match envelope.payload.options() {
2148 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
2149 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
2150 });
2151
2152 let branch_name = envelope.payload.branch_name.into();
2153 let remote_branch_name = envelope.payload.remote_branch_name.into();
2154 let remote_name = envelope.payload.remote_name.into();
2155
2156 let remote_output = repository_handle
2157 .update(&mut cx, |repository_handle, cx| {
2158 repository_handle.push(
2159 branch_name,
2160 remote_branch_name,
2161 remote_name,
2162 options,
2163 askpass,
2164 cx,
2165 )
2166 })
2167 .await??;
2168 Ok(proto::RemoteMessageResponse {
2169 stdout: remote_output.stdout,
2170 stderr: remote_output.stderr,
2171 })
2172 }
2173
2174 async fn handle_pull(
2175 this: Entity<Self>,
2176 envelope: TypedEnvelope<proto::Pull>,
2177 mut cx: AsyncApp,
2178 ) -> Result<proto::RemoteMessageResponse> {
2179 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2180 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2181 let askpass_id = envelope.payload.askpass_id;
2182 let askpass = make_remote_delegate(
2183 this,
2184 envelope.payload.project_id,
2185 repository_id,
2186 askpass_id,
2187 &mut cx,
2188 );
2189
2190 let branch_name = envelope.payload.branch_name.map(|name| name.into());
2191 let remote_name = envelope.payload.remote_name.into();
2192 let rebase = envelope.payload.rebase;
2193
2194 let remote_message = repository_handle
2195 .update(&mut cx, |repository_handle, cx| {
2196 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
2197 })
2198 .await??;
2199
2200 Ok(proto::RemoteMessageResponse {
2201 stdout: remote_message.stdout,
2202 stderr: remote_message.stderr,
2203 })
2204 }
2205
2206 async fn handle_stage(
2207 this: Entity<Self>,
2208 envelope: TypedEnvelope<proto::Stage>,
2209 mut cx: AsyncApp,
2210 ) -> Result<proto::Ack> {
2211 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2212 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2213
2214 let entries = envelope
2215 .payload
2216 .paths
2217 .into_iter()
2218 .map(|path| RepoPath::new(&path))
2219 .collect::<Result<Vec<_>>>()?;
2220
2221 repository_handle
2222 .update(&mut cx, |repository_handle, cx| {
2223 repository_handle.stage_entries(entries, cx)
2224 })
2225 .await?;
2226 Ok(proto::Ack {})
2227 }
2228
2229 async fn handle_unstage(
2230 this: Entity<Self>,
2231 envelope: TypedEnvelope<proto::Unstage>,
2232 mut cx: AsyncApp,
2233 ) -> Result<proto::Ack> {
2234 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2235 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2236
2237 let entries = envelope
2238 .payload
2239 .paths
2240 .into_iter()
2241 .map(|path| RepoPath::new(&path))
2242 .collect::<Result<Vec<_>>>()?;
2243
2244 repository_handle
2245 .update(&mut cx, |repository_handle, cx| {
2246 repository_handle.unstage_entries(entries, cx)
2247 })
2248 .await?;
2249
2250 Ok(proto::Ack {})
2251 }
2252
2253 async fn handle_stash(
2254 this: Entity<Self>,
2255 envelope: TypedEnvelope<proto::Stash>,
2256 mut cx: AsyncApp,
2257 ) -> Result<proto::Ack> {
2258 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2259 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2260
2261 let entries = envelope
2262 .payload
2263 .paths
2264 .into_iter()
2265 .map(|path| RepoPath::new(&path))
2266 .collect::<Result<Vec<_>>>()?;
2267
2268 repository_handle
2269 .update(&mut cx, |repository_handle, cx| {
2270 repository_handle.stash_entries(entries, cx)
2271 })
2272 .await?;
2273
2274 Ok(proto::Ack {})
2275 }
2276
2277 async fn handle_stash_pop(
2278 this: Entity<Self>,
2279 envelope: TypedEnvelope<proto::StashPop>,
2280 mut cx: AsyncApp,
2281 ) -> Result<proto::Ack> {
2282 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2283 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2284 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2285
2286 repository_handle
2287 .update(&mut cx, |repository_handle, cx| {
2288 repository_handle.stash_pop(stash_index, cx)
2289 })
2290 .await?;
2291
2292 Ok(proto::Ack {})
2293 }
2294
2295 async fn handle_stash_apply(
2296 this: Entity<Self>,
2297 envelope: TypedEnvelope<proto::StashApply>,
2298 mut cx: AsyncApp,
2299 ) -> Result<proto::Ack> {
2300 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2301 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2302 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2303
2304 repository_handle
2305 .update(&mut cx, |repository_handle, cx| {
2306 repository_handle.stash_apply(stash_index, cx)
2307 })
2308 .await?;
2309
2310 Ok(proto::Ack {})
2311 }
2312
2313 async fn handle_stash_drop(
2314 this: Entity<Self>,
2315 envelope: TypedEnvelope<proto::StashDrop>,
2316 mut cx: AsyncApp,
2317 ) -> Result<proto::Ack> {
2318 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2319 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2320 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2321
2322 repository_handle
2323 .update(&mut cx, |repository_handle, cx| {
2324 repository_handle.stash_drop(stash_index, cx)
2325 })
2326 .await??;
2327
2328 Ok(proto::Ack {})
2329 }
2330
2331 async fn handle_set_index_text(
2332 this: Entity<Self>,
2333 envelope: TypedEnvelope<proto::SetIndexText>,
2334 mut cx: AsyncApp,
2335 ) -> Result<proto::Ack> {
2336 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2337 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2338 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2339
2340 repository_handle
2341 .update(&mut cx, |repository_handle, cx| {
2342 repository_handle.spawn_set_index_text_job(
2343 repo_path,
2344 envelope.payload.text,
2345 None,
2346 cx,
2347 )
2348 })
2349 .await??;
2350 Ok(proto::Ack {})
2351 }
2352
2353 async fn handle_run_hook(
2354 this: Entity<Self>,
2355 envelope: TypedEnvelope<proto::RunGitHook>,
2356 mut cx: AsyncApp,
2357 ) -> Result<proto::Ack> {
2358 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2359 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2360 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2361 repository_handle
2362 .update(&mut cx, |repository_handle, cx| {
2363 repository_handle.run_hook(hook, cx)
2364 })
2365 .await??;
2366 Ok(proto::Ack {})
2367 }
2368
2369 async fn handle_commit(
2370 this: Entity<Self>,
2371 envelope: TypedEnvelope<proto::Commit>,
2372 mut cx: AsyncApp,
2373 ) -> Result<proto::Ack> {
2374 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2375 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2376 let askpass_id = envelope.payload.askpass_id;
2377
2378 let askpass = make_remote_delegate(
2379 this,
2380 envelope.payload.project_id,
2381 repository_id,
2382 askpass_id,
2383 &mut cx,
2384 );
2385
2386 let message = SharedString::from(envelope.payload.message);
2387 let name = envelope.payload.name.map(SharedString::from);
2388 let email = envelope.payload.email.map(SharedString::from);
2389 let options = envelope.payload.options.unwrap_or_default();
2390
2391 repository_handle
2392 .update(&mut cx, |repository_handle, cx| {
2393 repository_handle.commit(
2394 message,
2395 name.zip(email),
2396 CommitOptions {
2397 amend: options.amend,
2398 signoff: options.signoff,
2399 allow_empty: options.allow_empty,
2400 },
2401 askpass,
2402 cx,
2403 )
2404 })
2405 .await??;
2406 Ok(proto::Ack {})
2407 }
2408
2409 async fn handle_get_remotes(
2410 this: Entity<Self>,
2411 envelope: TypedEnvelope<proto::GetRemotes>,
2412 mut cx: AsyncApp,
2413 ) -> Result<proto::GetRemotesResponse> {
2414 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2415 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2416
2417 let branch_name = envelope.payload.branch_name;
2418 let is_push = envelope.payload.is_push;
2419
2420 let remotes = repository_handle
2421 .update(&mut cx, |repository_handle, _| {
2422 repository_handle.get_remotes(branch_name, is_push)
2423 })
2424 .await??;
2425
2426 Ok(proto::GetRemotesResponse {
2427 remotes: remotes
2428 .into_iter()
2429 .map(|remotes| proto::get_remotes_response::Remote {
2430 name: remotes.name.to_string(),
2431 })
2432 .collect::<Vec<_>>(),
2433 })
2434 }
2435
2436 async fn handle_get_worktrees(
2437 this: Entity<Self>,
2438 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2439 mut cx: AsyncApp,
2440 ) -> Result<proto::GitWorktreesResponse> {
2441 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2442 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2443
2444 let worktrees = repository_handle
2445 .update(&mut cx, |repository_handle, _| {
2446 repository_handle.worktrees()
2447 })
2448 .await??;
2449
2450 Ok(proto::GitWorktreesResponse {
2451 worktrees: worktrees
2452 .into_iter()
2453 .map(|worktree| worktree_to_proto(&worktree))
2454 .collect::<Vec<_>>(),
2455 })
2456 }
2457
2458 async fn handle_create_worktree(
2459 this: Entity<Self>,
2460 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2461 mut cx: AsyncApp,
2462 ) -> Result<proto::Ack> {
2463 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2464 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2465 let directory = PathBuf::from(envelope.payload.directory);
2466 let name = envelope.payload.name;
2467 let commit = envelope.payload.commit;
2468 let use_existing_branch = envelope.payload.use_existing_branch;
2469 let target = if name.is_empty() {
2470 CreateWorktreeTarget::Detached { base_sha: commit }
2471 } else if use_existing_branch {
2472 CreateWorktreeTarget::ExistingBranch { branch_name: name }
2473 } else {
2474 CreateWorktreeTarget::NewBranch {
2475 branch_name: name,
2476 base_sha: commit,
2477 }
2478 };
2479
2480 repository_handle
2481 .update(&mut cx, |repository_handle, _| {
2482 repository_handle.create_worktree(target, directory)
2483 })
2484 .await??;
2485
2486 Ok(proto::Ack {})
2487 }
2488
2489 async fn handle_remove_worktree(
2490 this: Entity<Self>,
2491 envelope: TypedEnvelope<proto::GitRemoveWorktree>,
2492 mut cx: AsyncApp,
2493 ) -> Result<proto::Ack> {
2494 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2495 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2496 let path = PathBuf::from(envelope.payload.path);
2497 let force = envelope.payload.force;
2498
2499 repository_handle
2500 .update(&mut cx, |repository_handle, _| {
2501 repository_handle.remove_worktree(path, force)
2502 })
2503 .await??;
2504
2505 Ok(proto::Ack {})
2506 }
2507
2508 async fn handle_rename_worktree(
2509 this: Entity<Self>,
2510 envelope: TypedEnvelope<proto::GitRenameWorktree>,
2511 mut cx: AsyncApp,
2512 ) -> Result<proto::Ack> {
2513 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2514 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2515 let old_path = PathBuf::from(envelope.payload.old_path);
2516 let new_path = PathBuf::from(envelope.payload.new_path);
2517
2518 repository_handle
2519 .update(&mut cx, |repository_handle, _| {
2520 repository_handle.rename_worktree(old_path, new_path)
2521 })
2522 .await??;
2523
2524 Ok(proto::Ack {})
2525 }
2526
2527 async fn handle_get_head_sha(
2528 this: Entity<Self>,
2529 envelope: TypedEnvelope<proto::GitGetHeadSha>,
2530 mut cx: AsyncApp,
2531 ) -> Result<proto::GitGetHeadShaResponse> {
2532 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2533 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2534
2535 let head_sha = repository_handle
2536 .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
2537 .await??;
2538
2539 Ok(proto::GitGetHeadShaResponse { sha: head_sha })
2540 }
2541
2542 async fn handle_get_commit_data(
2543 this: Entity<Self>,
2544 envelope: TypedEnvelope<proto::GetCommitData>,
2545 mut cx: AsyncApp,
2546 ) -> Result<proto::GetCommitDataResponse> {
2547 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2548 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2549
2550 let shas: Vec<Oid> = envelope
2551 .payload
2552 .shas
2553 .iter()
2554 .filter_map(|s| Oid::from_str(s).ok())
2555 .collect();
2556
2557 let mut commits = Vec::with_capacity(shas.len());
2558 let mut receivers = Vec::new();
2559
2560 repository_handle.update(&mut cx, |repository, cx| {
2561 for &sha in &shas {
2562 match repository.fetch_commit_data(sha, true, cx) {
2563 CommitDataState::Loaded(data) => {
2564 commits.push(commit_data_to_proto(data));
2565 }
2566 CommitDataState::Loading(Some(shared)) => {
2567 receivers.push(shared.clone());
2568 }
2569 CommitDataState::Loading(None) => {
2570 // todo(git_graph) this could happen if the request fails, we should encode an error case
2571 debug_panic!(
2572 "This should never happen since we passed true into fetch commit data"
2573 );
2574 }
2575 }
2576 }
2577 });
2578
2579 let results = future::join_all(receivers).await;
2580
2581 commits.extend(
2582 results
2583 .into_iter()
2584 .filter_map(|result| result.ok())
2585 .map(|data| commit_data_to_proto(&data)),
2586 );
2587
2588 Ok(proto::GetCommitDataResponse { commits })
2589 }
2590
2591 async fn handle_edit_ref(
2592 this: Entity<Self>,
2593 envelope: TypedEnvelope<proto::GitEditRef>,
2594 mut cx: AsyncApp,
2595 ) -> Result<proto::Ack> {
2596 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2597 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2598 let ref_name = envelope.payload.ref_name;
2599 let commit = match envelope.payload.action {
2600 Some(proto::git_edit_ref::Action::UpdateToCommit(sha)) => Some(sha),
2601 Some(proto::git_edit_ref::Action::Delete(_)) => None,
2602 None => anyhow::bail!("GitEditRef missing action"),
2603 };
2604
2605 repository_handle
2606 .update(&mut cx, |repository_handle, _| {
2607 repository_handle.edit_ref(ref_name, commit)
2608 })
2609 .await??;
2610
2611 Ok(proto::Ack {})
2612 }
2613
2614 async fn handle_repair_worktrees(
2615 this: Entity<Self>,
2616 envelope: TypedEnvelope<proto::GitRepairWorktrees>,
2617 mut cx: AsyncApp,
2618 ) -> Result<proto::Ack> {
2619 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2620 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2621
2622 repository_handle
2623 .update(&mut cx, |repository_handle, _| {
2624 repository_handle.repair_worktrees()
2625 })
2626 .await??;
2627
2628 Ok(proto::Ack {})
2629 }
2630
2631 async fn handle_get_branches(
2632 this: Entity<Self>,
2633 envelope: TypedEnvelope<proto::GitGetBranches>,
2634 mut cx: AsyncApp,
2635 ) -> Result<proto::GitBranchesResponse> {
2636 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2637 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2638
2639 let branches = repository_handle
2640 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2641 .await??;
2642
2643 Ok(proto::GitBranchesResponse {
2644 branches: branches
2645 .into_iter()
2646 .map(|branch| branch_to_proto(&branch))
2647 .collect::<Vec<_>>(),
2648 })
2649 }
2650 async fn handle_get_default_branch(
2651 this: Entity<Self>,
2652 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2653 mut cx: AsyncApp,
2654 ) -> Result<proto::GetDefaultBranchResponse> {
2655 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2656 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2657
2658 let branch = repository_handle
2659 .update(&mut cx, |repository_handle, _| {
2660 repository_handle.default_branch(false)
2661 })
2662 .await??
2663 .map(Into::into);
2664
2665 Ok(proto::GetDefaultBranchResponse { branch })
2666 }
2667 async fn handle_create_branch(
2668 this: Entity<Self>,
2669 envelope: TypedEnvelope<proto::GitCreateBranch>,
2670 mut cx: AsyncApp,
2671 ) -> Result<proto::Ack> {
2672 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2673 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2674 let branch_name = envelope.payload.branch_name;
2675
2676 repository_handle
2677 .update(&mut cx, |repository_handle, _| {
2678 repository_handle.create_branch(branch_name, None)
2679 })
2680 .await??;
2681
2682 Ok(proto::Ack {})
2683 }
2684
2685 async fn handle_change_branch(
2686 this: Entity<Self>,
2687 envelope: TypedEnvelope<proto::GitChangeBranch>,
2688 mut cx: AsyncApp,
2689 ) -> Result<proto::Ack> {
2690 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2691 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2692 let branch_name = envelope.payload.branch_name;
2693
2694 repository_handle
2695 .update(&mut cx, |repository_handle, _| {
2696 repository_handle.change_branch(branch_name)
2697 })
2698 .await??;
2699
2700 Ok(proto::Ack {})
2701 }
2702
2703 async fn handle_rename_branch(
2704 this: Entity<Self>,
2705 envelope: TypedEnvelope<proto::GitRenameBranch>,
2706 mut cx: AsyncApp,
2707 ) -> Result<proto::Ack> {
2708 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2709 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2710 let branch = envelope.payload.branch;
2711 let new_name = envelope.payload.new_name;
2712
2713 repository_handle
2714 .update(&mut cx, |repository_handle, _| {
2715 repository_handle.rename_branch(branch, new_name)
2716 })
2717 .await??;
2718
2719 Ok(proto::Ack {})
2720 }
2721
2722 async fn handle_create_remote(
2723 this: Entity<Self>,
2724 envelope: TypedEnvelope<proto::GitCreateRemote>,
2725 mut cx: AsyncApp,
2726 ) -> Result<proto::Ack> {
2727 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2728 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2729 let remote_name = envelope.payload.remote_name;
2730 let remote_url = envelope.payload.remote_url;
2731
2732 repository_handle
2733 .update(&mut cx, |repository_handle, _| {
2734 repository_handle.create_remote(remote_name, remote_url)
2735 })
2736 .await??;
2737
2738 Ok(proto::Ack {})
2739 }
2740
2741 async fn handle_delete_branch(
2742 this: Entity<Self>,
2743 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2744 mut cx: AsyncApp,
2745 ) -> Result<proto::Ack> {
2746 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2747 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2748 let is_remote = envelope.payload.is_remote;
2749 let branch_name = envelope.payload.branch_name;
2750
2751 repository_handle
2752 .update(&mut cx, |repository_handle, _| {
2753 repository_handle.delete_branch(is_remote, branch_name)
2754 })
2755 .await??;
2756
2757 Ok(proto::Ack {})
2758 }
2759
2760 async fn handle_remove_remote(
2761 this: Entity<Self>,
2762 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2763 mut cx: AsyncApp,
2764 ) -> Result<proto::Ack> {
2765 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2766 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2767 let remote_name = envelope.payload.remote_name;
2768
2769 repository_handle
2770 .update(&mut cx, |repository_handle, _| {
2771 repository_handle.remove_remote(remote_name)
2772 })
2773 .await??;
2774
2775 Ok(proto::Ack {})
2776 }
2777
2778 async fn handle_show(
2779 this: Entity<Self>,
2780 envelope: TypedEnvelope<proto::GitShow>,
2781 mut cx: AsyncApp,
2782 ) -> Result<proto::GitCommitDetails> {
2783 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2784 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2785
2786 let commit = repository_handle
2787 .update(&mut cx, |repository_handle, _| {
2788 repository_handle.show(envelope.payload.commit)
2789 })
2790 .await??;
2791 Ok(proto::GitCommitDetails {
2792 sha: commit.sha.into(),
2793 message: commit.message.into(),
2794 commit_timestamp: commit.commit_timestamp,
2795 author_email: commit.author_email.into(),
2796 author_name: commit.author_name.into(),
2797 })
2798 }
2799
2800 async fn handle_create_checkpoint(
2801 this: Entity<Self>,
2802 envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
2803 mut cx: AsyncApp,
2804 ) -> Result<proto::GitCreateCheckpointResponse> {
2805 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2806 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2807
2808 let checkpoint = repository_handle
2809 .update(&mut cx, |repository, _| repository.checkpoint())
2810 .await??;
2811
2812 Ok(proto::GitCreateCheckpointResponse {
2813 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
2814 })
2815 }
2816
2817 async fn handle_create_archive_checkpoint(
2818 this: Entity<Self>,
2819 envelope: TypedEnvelope<proto::GitCreateArchiveCheckpoint>,
2820 mut cx: AsyncApp,
2821 ) -> Result<proto::GitCreateArchiveCheckpointResponse> {
2822 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2823 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2824
2825 let (staged_commit_sha, unstaged_commit_sha) = repository_handle
2826 .update(&mut cx, |repository, _| {
2827 repository.create_archive_checkpoint()
2828 })
2829 .await??;
2830
2831 Ok(proto::GitCreateArchiveCheckpointResponse {
2832 staged_commit_sha,
2833 unstaged_commit_sha,
2834 })
2835 }
2836
2837 async fn handle_restore_checkpoint(
2838 this: Entity<Self>,
2839 envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
2840 mut cx: AsyncApp,
2841 ) -> Result<proto::Ack> {
2842 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2843 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2844
2845 let checkpoint = GitRepositoryCheckpoint {
2846 commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
2847 };
2848
2849 repository_handle
2850 .update(&mut cx, |repository, _| {
2851 repository.restore_checkpoint(checkpoint)
2852 })
2853 .await??;
2854
2855 Ok(proto::Ack {})
2856 }
2857
2858 async fn handle_restore_archive_checkpoint(
2859 this: Entity<Self>,
2860 envelope: TypedEnvelope<proto::GitRestoreArchiveCheckpoint>,
2861 mut cx: AsyncApp,
2862 ) -> Result<proto::Ack> {
2863 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2864 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2865 let staged_commit_sha = envelope.payload.staged_commit_sha;
2866 let unstaged_commit_sha = envelope.payload.unstaged_commit_sha;
2867
2868 repository_handle
2869 .update(&mut cx, |repository, _| {
2870 repository.restore_archive_checkpoint(staged_commit_sha, unstaged_commit_sha)
2871 })
2872 .await??;
2873
2874 Ok(proto::Ack {})
2875 }
2876
2877 async fn handle_compare_checkpoints(
2878 this: Entity<Self>,
2879 envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
2880 mut cx: AsyncApp,
2881 ) -> Result<proto::GitCompareCheckpointsResponse> {
2882 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2883 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2884
2885 let left = GitRepositoryCheckpoint {
2886 commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
2887 };
2888 let right = GitRepositoryCheckpoint {
2889 commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
2890 };
2891
2892 let equal = repository_handle
2893 .update(&mut cx, |repository, _| {
2894 repository.compare_checkpoints(left, right)
2895 })
2896 .await??;
2897
2898 Ok(proto::GitCompareCheckpointsResponse { equal })
2899 }
2900
2901 async fn handle_diff_checkpoints(
2902 this: Entity<Self>,
2903 envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
2904 mut cx: AsyncApp,
2905 ) -> Result<proto::GitDiffCheckpointsResponse> {
2906 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2907 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2908
2909 let base = GitRepositoryCheckpoint {
2910 commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
2911 };
2912 let target = GitRepositoryCheckpoint {
2913 commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
2914 };
2915
2916 let diff = repository_handle
2917 .update(&mut cx, |repository, _| {
2918 repository.diff_checkpoints(base, target)
2919 })
2920 .await??;
2921
2922 Ok(proto::GitDiffCheckpointsResponse { diff })
2923 }
2924
2925 async fn handle_load_commit_diff(
2926 this: Entity<Self>,
2927 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2928 mut cx: AsyncApp,
2929 ) -> Result<proto::LoadCommitDiffResponse> {
2930 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2931 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2932
2933 let commit_diff = repository_handle
2934 .update(&mut cx, |repository_handle, _| {
2935 repository_handle.load_commit_diff(envelope.payload.commit)
2936 })
2937 .await??;
2938 Ok(proto::LoadCommitDiffResponse {
2939 files: commit_diff
2940 .files
2941 .into_iter()
2942 .map(|file| proto::CommitFile {
2943 path: file.path.to_proto(),
2944 old_text: file.old_text,
2945 new_text: file.new_text,
2946 is_binary: file.is_binary,
2947 })
2948 .collect(),
2949 })
2950 }
2951
2952 async fn handle_file_history(
2953 this: Entity<Self>,
2954 envelope: TypedEnvelope<proto::GitFileHistory>,
2955 mut cx: AsyncApp,
2956 ) -> Result<proto::GitFileHistoryResponse> {
2957 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2958 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2959 let path = RepoPath::from_proto(&envelope.payload.path)?;
2960 let skip = envelope.payload.skip as usize;
2961 let limit = envelope.payload.limit.map(|l| l as usize);
2962
2963 let file_history = repository_handle
2964 .update(&mut cx, |repository_handle, _| {
2965 repository_handle.file_history_paginated(path, skip, limit)
2966 })
2967 .await??;
2968
2969 Ok(proto::GitFileHistoryResponse {
2970 entries: file_history
2971 .entries
2972 .into_iter()
2973 .map(|entry| proto::FileHistoryEntry {
2974 sha: entry.sha.to_string(),
2975 subject: entry.subject.to_string(),
2976 message: entry.message.to_string(),
2977 commit_timestamp: entry.commit_timestamp,
2978 author_name: entry.author_name.to_string(),
2979 author_email: entry.author_email.to_string(),
2980 })
2981 .collect(),
2982 path: file_history.path.to_proto(),
2983 })
2984 }
2985
2986 async fn handle_reset(
2987 this: Entity<Self>,
2988 envelope: TypedEnvelope<proto::GitReset>,
2989 mut cx: AsyncApp,
2990 ) -> Result<proto::Ack> {
2991 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2992 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2993
2994 let mode = match envelope.payload.mode() {
2995 git_reset::ResetMode::Soft => ResetMode::Soft,
2996 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2997 };
2998
2999 repository_handle
3000 .update(&mut cx, |repository_handle, cx| {
3001 repository_handle.reset(envelope.payload.commit, mode, cx)
3002 })
3003 .await??;
3004 Ok(proto::Ack {})
3005 }
3006
3007 async fn handle_checkout_files(
3008 this: Entity<Self>,
3009 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
3010 mut cx: AsyncApp,
3011 ) -> Result<proto::Ack> {
3012 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
3013 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
3014 let paths = envelope
3015 .payload
3016 .paths
3017 .iter()
3018 .map(|s| RepoPath::from_proto(s))
3019 .collect::<Result<Vec<_>>>()?;
3020
3021 repository_handle
3022 .update(&mut cx, |repository_handle, cx| {
3023 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
3024 })
3025 .await?;
3026 Ok(proto::Ack {})
3027 }
3028
    /// Host-side handler for the `OpenCommitMessageBuffer` RPC: opens (or
    /// creates) the repository's commit-message buffer, registers it with the
    /// buffer store for the requesting peer, and returns the buffer's remote
    /// id so the peer can attach to it.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        // Route the buffer to the request's originator,
                        // falling back to the direct sender when the message
                        // was not forwarded.
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
3059
    /// Remote-protocol handler: forwards a credential prompt from a git
    /// subprocess to the askpass delegate registered for this operation.
    ///
    /// The delegate is removed from the shared map for the duration of the
    /// prompt and re-inserted afterwards, so the same askpass session can
    /// answer follow-up prompts.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            // An unknown askpass id indicates a bookkeeping bug on the
            // requesting side: panic in debug builds, error in release.
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        // `None` from the delegate is treated as cancellation.
        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        // Put the delegate back so subsequent prompts for the same operation
        // can find it.
        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
3088
3089 async fn handle_check_for_pushed_commits(
3090 this: Entity<Self>,
3091 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
3092 mut cx: AsyncApp,
3093 ) -> Result<proto::CheckForPushedCommitsResponse> {
3094 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
3095 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
3096
3097 let branches = repository_handle
3098 .update(&mut cx, |repository_handle, _| {
3099 repository_handle.check_for_pushed_commits()
3100 })
3101 .await??;
3102 Ok(proto::CheckForPushedCommitsResponse {
3103 pushed_to: branches
3104 .into_iter()
3105 .map(|commit| commit.to_string())
3106 .collect(),
3107 })
3108 }
3109
3110 async fn handle_git_diff(
3111 this: Entity<Self>,
3112 envelope: TypedEnvelope<proto::GitDiff>,
3113 mut cx: AsyncApp,
3114 ) -> Result<proto::GitDiffResponse> {
3115 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
3116 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
3117 let diff_type = match envelope.payload.diff_type() {
3118 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
3119 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
3120 proto::git_diff::DiffType::MergeBase => {
3121 let base_ref = envelope
3122 .payload
3123 .merge_base_ref
3124 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
3125 DiffType::MergeBase {
3126 base_ref: base_ref.into(),
3127 }
3128 }
3129 };
3130
3131 let mut diff = repository_handle
3132 .update(&mut cx, |repository_handle, cx| {
3133 repository_handle.diff(diff_type, cx)
3134 })
3135 .await??;
3136 const ONE_MB: usize = 1_000_000;
3137 if diff.len() > ONE_MB {
3138 diff = diff.chars().take(ONE_MB).collect()
3139 }
3140
3141 Ok(proto::GitDiffResponse { diff })
3142 }
3143
3144 async fn handle_tree_diff(
3145 this: Entity<Self>,
3146 request: TypedEnvelope<proto::GetTreeDiff>,
3147 mut cx: AsyncApp,
3148 ) -> Result<proto::GetTreeDiffResponse> {
3149 let repository_id = RepositoryId(request.payload.repository_id);
3150 let diff_type = if request.payload.is_merge {
3151 DiffTreeType::MergeBase {
3152 base: request.payload.base.into(),
3153 head: request.payload.head.into(),
3154 }
3155 } else {
3156 DiffTreeType::Since {
3157 base: request.payload.base.into(),
3158 head: request.payload.head.into(),
3159 }
3160 };
3161
3162 let diff = this
3163 .update(&mut cx, |this, cx| {
3164 let repository = this.repositories().get(&repository_id)?;
3165 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
3166 })
3167 .context("missing repository")?
3168 .await??;
3169
3170 Ok(proto::GetTreeDiffResponse {
3171 entries: diff
3172 .entries
3173 .into_iter()
3174 .map(|(path, status)| proto::TreeDiffStatus {
3175 path: path.as_ref().to_proto(),
3176 status: match status {
3177 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
3178 TreeDiffStatus::Modified { .. } => {
3179 proto::tree_diff_status::Status::Modified.into()
3180 }
3181 TreeDiffStatus::Deleted { .. } => {
3182 proto::tree_diff_status::Status::Deleted.into()
3183 }
3184 },
3185 oid: match status {
3186 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
3187 Some(old.to_string())
3188 }
3189 TreeDiffStatus::Added => None,
3190 },
3191 })
3192 .collect(),
3193 })
3194 }
3195
3196 async fn handle_get_blob_content(
3197 this: Entity<Self>,
3198 request: TypedEnvelope<proto::GetBlobContent>,
3199 mut cx: AsyncApp,
3200 ) -> Result<proto::GetBlobContentResponse> {
3201 let oid = git::Oid::from_str(&request.payload.oid)?;
3202 let repository_id = RepositoryId(request.payload.repository_id);
3203 let content = this
3204 .update(&mut cx, |this, cx| {
3205 let repository = this.repositories().get(&repository_id)?;
3206 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
3207 })
3208 .context("missing repository")?
3209 .await?;
3210 Ok(proto::GetBlobContentResponse { content })
3211 }
3212
3213 async fn handle_open_unstaged_diff(
3214 this: Entity<Self>,
3215 request: TypedEnvelope<proto::OpenUnstagedDiff>,
3216 mut cx: AsyncApp,
3217 ) -> Result<proto::OpenUnstagedDiffResponse> {
3218 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3219 let diff = this
3220 .update(&mut cx, |this, cx| {
3221 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
3222 Some(this.open_unstaged_diff(buffer, cx))
3223 })
3224 .context("missing buffer")?
3225 .await?;
3226 this.update(&mut cx, |this, _| {
3227 let shared_diffs = this
3228 .shared_diffs
3229 .entry(request.original_sender_id.unwrap_or(request.sender_id))
3230 .or_default();
3231 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
3232 });
3233 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
3234 Ok(proto::OpenUnstagedDiffResponse { staged_text })
3235 }
3236
    /// Remote-protocol handler: opens the uncommitted diff for a buffer,
    /// records it as shared with the requesting peer, and returns the base
    /// texts the peer needs to reconstruct the diff locally.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Record the diff under the originating peer's shared state.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff's base text, when present, is the index
            // (staged) copy of the file.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            // Decide which base texts to ship. `IndexMatchesHead` lets the
            // peer reuse the committed text for the index instead of
            // receiving the same contents twice.
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        // Same underlying snapshot: the index matches HEAD.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    // No index base text is available; send only HEAD.
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base text; only the index text (if any) is
                // relevant.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
3297
3298 async fn handle_update_diff_bases(
3299 this: Entity<Self>,
3300 request: TypedEnvelope<proto::UpdateDiffBases>,
3301 mut cx: AsyncApp,
3302 ) -> Result<()> {
3303 let buffer_id = BufferId::new(request.payload.buffer_id)?;
3304 this.update(&mut cx, |this, cx| {
3305 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
3306 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
3307 {
3308 let buffer = buffer.read(cx).text_snapshot();
3309 diff_state.update(cx, |diff_state, cx| {
3310 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
3311 })
3312 }
3313 });
3314 Ok(())
3315 }
3316
3317 async fn handle_blame_buffer(
3318 this: Entity<Self>,
3319 envelope: TypedEnvelope<proto::BlameBuffer>,
3320 mut cx: AsyncApp,
3321 ) -> Result<proto::BlameBufferResponse> {
3322 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3323 let version = deserialize_version(&envelope.payload.version);
3324 let buffer = this.read_with(&cx, |this, cx| {
3325 this.buffer_store.read(cx).get_existing(buffer_id)
3326 })?;
3327 buffer
3328 .update(&mut cx, |buffer, _| {
3329 buffer.wait_for_version(version.clone())
3330 })
3331 .await?;
3332 let blame = this
3333 .update(&mut cx, |this, cx| {
3334 this.blame_buffer(&buffer, Some(version), cx)
3335 })
3336 .await?;
3337 Ok(serialize_blame_buffer_response(blame))
3338 }
3339
3340 async fn handle_get_permalink_to_line(
3341 this: Entity<Self>,
3342 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
3343 mut cx: AsyncApp,
3344 ) -> Result<proto::GetPermalinkToLineResponse> {
3345 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
3346 // let version = deserialize_version(&envelope.payload.version);
3347 let selection = {
3348 let proto_selection = envelope
3349 .payload
3350 .selection
3351 .context("no selection to get permalink for defined")?;
3352 proto_selection.start as u32..proto_selection.end as u32
3353 };
3354 let buffer = this.read_with(&cx, |this, cx| {
3355 this.buffer_store.read(cx).get_existing(buffer_id)
3356 })?;
3357 let permalink = this
3358 .update(&mut cx, |this, cx| {
3359 this.get_permalink_to_line(&buffer, selection, cx)
3360 })
3361 .await?;
3362 Ok(proto::GetPermalinkToLineResponse {
3363 permalink: permalink.to_string(),
3364 })
3365 }
3366
3367 fn repository_for_request(
3368 this: &Entity<Self>,
3369 id: RepositoryId,
3370 cx: &mut AsyncApp,
3371 ) -> Result<Entity<Repository>> {
3372 this.read_with(cx, |this, _| {
3373 this.repositories
3374 .get(&id)
3375 .context("missing repository handle")
3376 .cloned()
3377 })
3378 }
3379
3380 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3381 self.repositories
3382 .iter()
3383 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3384 .collect()
3385 }
3386
    /// Groups freshly-updated worktree entries by the repository that
    /// contains them, translating each absolute path into a [`RepoPath`].
    ///
    /// When repositories are nested, each path is assigned to its innermost
    /// containing repository only.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorting the paths lets each repository's task locate its entries
        // with `partition_point` below instead of a full scan.
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sort repositories by work-directory path so that, after `rev()`,
            // deeper (more specific) work directories are processed first.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                // One background task per repository; `FuturesOrdered`
                // preserves submission order for the de-duplication pass.
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Keep the original index so duplicates across nested
                        // repos can be detected later.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3466}
3467
impl BufferGitState {
    /// Creates an empty per-buffer git state with no diffs, base texts, or
    /// conflict data yet.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            oid_diffs: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Starts as `false`: no diff recalculation is in flight yet.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            oid_texts: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }

    /// Records the buffer's new language and kicks off a diff recalculation
    /// so highlighting-dependent diff state gets refreshed.
    #[ztracing::instrument(skip_all)]
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Re-parses merge conflict markers in `buffer` in the background and
    /// updates the conflict set with the result.
    ///
    /// The returned receiver resolves once the update has been applied; it
    /// resolves (by being dropped) immediately when there is no live conflict
    /// set or no existing conflict to refresh.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when the buffer previously had a conflict; otherwise
        // there is nothing to refresh.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff on a background thread; only the snapshot
                // swap happens on the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Notify everyone waiting on a conflict-marker refresh.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }

    /// Upgrades the weak handle to the unstaged diff, if it is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Upgrades the weak handle to the uncommitted diff, if it is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Upgrades the weak handle to the diff against the given oid, if any.
    fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
        self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
    }

    /// Applies base texts received from the remote peer (see
    /// `proto::UpdateDiffBases`) and triggers a diff recalculation.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        // Unknown mode values (e.g. from a newer peer) are ignored.
        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }

    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves when it finishes; otherwise returns `None`.
    pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
        if *self.recalculating_tx.borrow() {
            let mut rx = self.recalculating_tx.subscribe();
            Some(async move {
                loop {
                    // Wait until the watch reports `false` (done) or closes.
                    let is_recalculating = rx.recv().await;
                    if is_recalculating != Some(true) {
                        break;
                    }
                }
            })
        } else {
            None
        }
    }

    /// Stores new head and/or index base texts (normalizing line endings) and
    /// kicks off a diff recalculation.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: Option<DiffBasesChange>,
        cx: &mut Context<Self>,
    ) {
        match diff_bases_change {
            Some(DiffBasesChange::SetIndex(index)) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetHead(head)) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            Some(DiffBasesChange::SetBoth(text)) => {
                // One shared `Arc` for both: `recalculate_diffs` later uses
                // pointer equality to detect index == head.
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::from(text.as_str())
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetEach { index, head }) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::from(index.as_str())
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::from(head.as_str())
                });
                self.head_changed = true;
            }
            None => {}
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged, uncommitted, and per-oid diffs for `buffer`
    /// against the currently stored base texts, on a background task.
    ///
    /// The `recalculating_tx` watch is set to `true` for the duration of the
    /// task so `wait_for_recalculation` can observe completion. The task
    /// aborts early if new hunk staging operations occur while it runs.
    #[ztracing::instrument(skip_all)]
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality suffices here: `SetBoth` above stores the same
        // `Arc` in both fields when index == head.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };

        // Snapshot the live oid diffs (and their cached base texts) that
        // still have listeners.
        let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
            .oid_diffs
            .iter()
            .filter_map(|(oid, weak)| {
                let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
                weak.upgrade().map(|diff| (*oid, diff, base_text))
            })
            .collect();

        // Drop dead oid diffs and their cached base texts.
        self.oid_diffs.retain(|oid, weak| {
            let alive = weak.upgrade().is_some();
            if !alive {
                if let Some(oid) = oid {
                    self.oid_texts.remove(oid);
                }
            }
            alive
        });
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    cx.update(|cx| {
                        unstaged_diff.read(cx).update_diff(
                            buffer.clone(),
                            index,
                            index_changed.then_some(false),
                            language.clone(),
                            cx,
                        )
                    })
                    .await,
                );
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When the index matches HEAD, the uncommitted diff is
                // identical to the unstaged one; reuse it instead of
                // recomputing.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        cx.update(|cx| {
                            uncommitted_diff.read(cx).update_diff(
                                buffer.clone(),
                                head,
                                head_changed.then_some(true),
                                language.clone(),
                                cx,
                            )
                        })
                        .await,
                    )
                }
            }

            // Dropping BufferDiff can be expensive, so yield back to the event loop
            // for a bit
            yield_now().await;

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                let task = unstaged_diff.update(cx, |diff, cx| {
                    // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
                    // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
                    // view multibuffers.
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                });
                Some(task.await)
            } else {
                None
            };

            yield_now().await;

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot_with_secondary(
                            new_uncommitted_diff,
                            &buffer,
                            unstaged_changed_range.flatten(),
                            true,
                            cx,
                        )
                    })
                    .await;
            }

            yield_now().await;

            // Refresh each still-alive diff against a specific oid.
            for (oid, oid_diff, base_text) in oid_diffs {
                let new_oid_diff = cx
                    .update(|cx| {
                        oid_diff.read(cx).update_diff(
                            buffer.clone(),
                            base_text,
                            None,
                            language.clone(),
                            cx,
                        )
                    })
                    .await;

                oid_diff
                    .update(cx, |diff, cx| {
                        if language_changed {
                            diff.language_changed(language.clone(), language_registry.clone(), cx);
                        }
                        diff.set_snapshot(new_oid_diff, &buffer, cx)
                    })
                    .await;

                log::debug!(
                    "finished recalculating oid diff for buffer {} oid {:?}",
                    buffer.remote_id(),
                    oid
                );

                yield_now().await;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and signal completion to any waiters.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                });
            }

            Ok(())
        }));
    }
}
3852
/// Builds an [`AskPassDelegate`] that forwards credential prompts from this
/// side to the downstream client over RPC and relays the answer back through
/// the provided channel.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // No downstream client means there is nobody to ask; bail.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext password from memory once it has been
                // handed off.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3882
3883impl RepositoryId {
3884 pub fn to_proto(self) -> u64 {
3885 self.0
3886 }
3887
3888 pub fn from_proto(id: u64) -> Self {
3889 RepositoryId(id)
3890 }
3891}
3892
3893impl RepositorySnapshot {
3894 fn empty(
3895 id: RepositoryId,
3896 work_directory_abs_path: Arc<Path>,
3897 original_repo_abs_path: Option<Arc<Path>>,
3898 path_style: PathStyle,
3899 ) -> Self {
3900 Self {
3901 id,
3902 statuses_by_path: Default::default(),
3903 original_repo_abs_path: original_repo_abs_path
3904 .unwrap_or_else(|| work_directory_abs_path.clone()),
3905 work_directory_abs_path,
3906 branch: None,
3907 branch_list: Arc::from([]),
3908 head_commit: None,
3909 scan_id: 0,
3910 merge: Default::default(),
3911 remote_origin_url: None,
3912 remote_upstream_url: None,
3913 stash_entries: Default::default(),
3914 linked_worktrees: Arc::from([]),
3915 path_style,
3916 }
3917 }
3918
3919 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3920 proto::UpdateRepository {
3921 branch_summary: self.branch.as_ref().map(branch_to_proto),
3922 branch_list: self.branch_list.iter().map(branch_to_proto).collect(),
3923 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3924 updated_statuses: self
3925 .statuses_by_path
3926 .iter()
3927 .map(|entry| entry.to_proto())
3928 .collect(),
3929 removed_statuses: Default::default(),
3930 current_merge_conflicts: self
3931 .merge
3932 .merge_heads_by_conflicted_path
3933 .iter()
3934 .map(|(repo_path, _)| repo_path.to_proto())
3935 .collect(),
3936 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3937 project_id,
3938 id: self.id.to_proto(),
3939 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3940 entry_ids: vec![self.id.to_proto()],
3941 scan_id: self.scan_id,
3942 is_last_update: true,
3943 stash_entries: self
3944 .stash_entries
3945 .entries
3946 .iter()
3947 .map(stash_to_proto)
3948 .collect(),
3949 remote_upstream_url: self.remote_upstream_url.clone(),
3950 remote_origin_url: self.remote_origin_url.clone(),
3951 original_repo_abs_path: Some(
3952 self.original_repo_abs_path.to_string_lossy().into_owned(),
3953 ),
3954 linked_worktrees: self
3955 .linked_worktrees
3956 .iter()
3957 .map(worktree_to_proto)
3958 .collect(),
3959 }
3960 }
3961
3962 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3963 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3964 let mut removed_statuses: Vec<String> = Vec::new();
3965
3966 let mut new_statuses = self.statuses_by_path.iter().peekable();
3967 let mut old_statuses = old.statuses_by_path.iter().peekable();
3968
3969 let mut current_new_entry = new_statuses.next();
3970 let mut current_old_entry = old_statuses.next();
3971 loop {
3972 match (current_new_entry, current_old_entry) {
3973 (Some(new_entry), Some(old_entry)) => {
3974 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3975 Ordering::Less => {
3976 updated_statuses.push(new_entry.to_proto());
3977 current_new_entry = new_statuses.next();
3978 }
3979 Ordering::Equal => {
3980 if new_entry.status != old_entry.status
3981 || new_entry.diff_stat != old_entry.diff_stat
3982 {
3983 updated_statuses.push(new_entry.to_proto());
3984 }
3985 current_old_entry = old_statuses.next();
3986 current_new_entry = new_statuses.next();
3987 }
3988 Ordering::Greater => {
3989 removed_statuses.push(old_entry.repo_path.to_proto());
3990 current_old_entry = old_statuses.next();
3991 }
3992 }
3993 }
3994 (None, Some(old_entry)) => {
3995 removed_statuses.push(old_entry.repo_path.to_proto());
3996 current_old_entry = old_statuses.next();
3997 }
3998 (Some(new_entry), None) => {
3999 updated_statuses.push(new_entry.to_proto());
4000 current_new_entry = new_statuses.next();
4001 }
4002 (None, None) => break,
4003 }
4004 }
4005
4006 proto::UpdateRepository {
4007 branch_summary: self.branch.as_ref().map(branch_to_proto),
4008 branch_list: self.branch_list.iter().map(branch_to_proto).collect(),
4009 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
4010 updated_statuses,
4011 removed_statuses,
4012 current_merge_conflicts: self
4013 .merge
4014 .merge_heads_by_conflicted_path
4015 .iter()
4016 .map(|(path, _)| path.to_proto())
4017 .collect(),
4018 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
4019 project_id,
4020 id: self.id.to_proto(),
4021 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
4022 entry_ids: vec![],
4023 scan_id: self.scan_id,
4024 is_last_update: true,
4025 stash_entries: self
4026 .stash_entries
4027 .entries
4028 .iter()
4029 .map(stash_to_proto)
4030 .collect(),
4031 remote_upstream_url: self.remote_upstream_url.clone(),
4032 remote_origin_url: self.remote_origin_url.clone(),
4033 original_repo_abs_path: Some(
4034 self.original_repo_abs_path.to_string_lossy().into_owned(),
4035 ),
4036 linked_worktrees: self
4037 .linked_worktrees
4038 .iter()
4039 .map(worktree_to_proto)
4040 .collect(),
4041 }
4042 }
4043
4044 /// The main worktree is the original checkout that other worktrees were
4045 /// created from.
4046 ///
4047 /// For example, if you had both `~/code/zed` and `~/code/worktrees/zed-2`,
4048 /// then `~/code/zed` is the main worktree and `~/code/worktrees/zed-2` is a linked worktree.
4049 ///
4050 /// Submodules also return `true` here, since they are not linked worktrees.
4051 pub fn is_main_worktree(&self) -> bool {
4052 self.work_directory_abs_path == self.original_repo_abs_path
4053 }
4054
4055 /// Returns true if this repository is a linked worktree, that is, one that
4056 /// was created from another worktree.
4057 ///
4058 /// Returns `false` for both the main worktree and submodules.
4059 pub fn is_linked_worktree(&self) -> bool {
4060 !self.is_main_worktree()
4061 }
4062
4063 pub fn linked_worktrees(&self) -> &[GitWorktree] {
4064 &self.linked_worktrees
4065 }
4066
4067 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
4068 self.statuses_by_path.iter().cloned()
4069 }
4070
4071 pub fn status_summary(&self) -> GitSummary {
4072 self.statuses_by_path.summary().item_summary
4073 }
4074
4075 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
4076 self.statuses_by_path
4077 .get(&PathKey(path.as_ref().clone()), ())
4078 .cloned()
4079 }
4080
4081 pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
4082 self.statuses_by_path
4083 .get(&PathKey(path.as_ref().clone()), ())
4084 .and_then(|entry| entry.diff_stat)
4085 }
4086
    /// Converts an absolute filesystem path into a path relative to this
    /// repository's working directory, or `None` if `abs_path` does not lie
    /// inside the working directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
    }
4090
4091 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
4092 let repo_path = repo_path.display(self.path_style);
4093 PathBuf::from(
4094 self.path_style
4095 .join(&self.work_directory_abs_path, repo_path.as_ref())
4096 .unwrap(),
4097 )
4098 }
4099
4100 #[inline]
4101 fn abs_path_to_repo_path_inner(
4102 work_directory_abs_path: &Path,
4103 abs_path: &Path,
4104 path_style: PathStyle,
4105 ) -> Option<RepoPath> {
4106 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
4107 Some(RepoPath::from_rel_path(&rel_path))
4108 }
4109
4110 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
4111 self.merge
4112 .merge_heads_by_conflicted_path
4113 .contains_key(repo_path)
4114 }
4115
4116 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
4117 let had_conflict_on_last_merge_head_change = self
4118 .merge
4119 .merge_heads_by_conflicted_path
4120 .contains_key(repo_path);
4121 let has_conflict_currently = self
4122 .status_for_path(repo_path)
4123 .is_some_and(|entry| entry.status.is_conflicted());
4124 had_conflict_on_last_merge_head_change || has_conflict_currently
4125 }
4126
4127 /// This is the name that will be displayed in the repository selector for this repository.
4128 pub fn display_name(&self) -> SharedString {
4129 self.work_directory_abs_path
4130 .file_name()
4131 .unwrap_or_default()
4132 .to_string_lossy()
4133 .to_string()
4134 .into()
4135 }
4136}
4137
4138pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
4139 proto::StashEntry {
4140 oid: entry.oid.as_bytes().to_vec(),
4141 message: entry.message.clone(),
4142 branch: entry.branch.clone(),
4143 index: entry.index as u64,
4144 timestamp: entry.timestamp,
4145 }
4146}
4147
4148pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
4149 Ok(StashEntry {
4150 oid: Oid::from_bytes(&entry.oid)?,
4151 message: entry.message.clone(),
4152 index: entry.index as usize,
4153 branch: entry.branch.clone(),
4154 timestamp: entry.timestamp,
4155 })
4156}
4157
4158impl MergeDetails {
4159 async fn update(
4160 &mut self,
4161 backend: &Arc<dyn GitRepository>,
4162 current_conflicted_paths: Vec<RepoPath>,
4163 ) -> Result<bool> {
4164 log::debug!("load merge details");
4165 self.message = backend.merge_message().await.map(SharedString::from);
4166 let heads = backend
4167 .revparse_batch(vec![
4168 "MERGE_HEAD".into(),
4169 "CHERRY_PICK_HEAD".into(),
4170 "REBASE_HEAD".into(),
4171 "REVERT_HEAD".into(),
4172 "APPLY_HEAD".into(),
4173 ])
4174 .await
4175 .log_err()
4176 .unwrap_or_default()
4177 .into_iter()
4178 .map(|opt| opt.map(SharedString::from))
4179 .collect::<Vec<_>>();
4180
4181 let mut conflicts_changed = false;
4182
4183 // Record the merge state for newly conflicted paths
4184 for path in ¤t_conflicted_paths {
4185 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
4186 conflicts_changed = true;
4187 self.merge_heads_by_conflicted_path
4188 .insert(path.clone(), heads.clone());
4189 }
4190 }
4191
4192 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
4193 self.merge_heads_by_conflicted_path
4194 .retain(|path, old_merge_heads| {
4195 let keep = current_conflicted_paths.contains(path)
4196 || (old_merge_heads == &heads
4197 && old_merge_heads.iter().any(|head| head.is_some()));
4198 if !keep {
4199 conflicts_changed = true;
4200 }
4201 keep
4202 });
4203
4204 Ok(conflicts_changed)
4205 }
4206}
4207
4208impl Repository {
4209 pub fn is_trusted(&self) -> bool {
4210 match self.repository_state.peek() {
4211 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
4212 _ => false,
4213 }
4214 }
4215
4216 pub fn snapshot(&self) -> RepositorySnapshot {
4217 self.snapshot.clone()
4218 }
4219
4220 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
4221 self.pending_ops.iter().cloned()
4222 }
4223
4224 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
4225 self.pending_ops.summary().clone()
4226 }
4227
4228 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
4229 self.pending_ops
4230 .get(&PathKey(path.as_ref().clone()), ())
4231 .cloned()
4232 }
4233
    /// Constructs a repository backed by a local checkout.
    ///
    /// Backend initialization runs asynchronously; it is exposed as a shared
    /// future so the git worker and any other consumers can all await the same
    /// initialization. Also subscribes to this repository's own events to
    /// invalidate cached graph data when history-affecting state changes.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                // Convert the error to a `String` before sharing the future.
                .map_err(|err| err.to_string())
            })
            .shared();
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached graph data on history-affecting events.
        // NOTE(review): `scan_id > 1` appears to skip invalidation during the
        // initial scan — confirm against where `scan_id` is incremented.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            RepositoryEvent::StashEntriesChanged => {
                if this.scan_id > 1 {
                    // Only graphs whose log source is `All` are retained-out here.
                    this.initial_graph_data
                        .retain(|(log_source, _), _| *log_source != LogSource::All);
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            commit_data_handler: CommitDataHandlerState::Closed,
        }
    }
4307
4308 fn remote(
4309 id: RepositoryId,
4310 work_directory_abs_path: Arc<Path>,
4311 original_repo_abs_path: Option<Arc<Path>>,
4312 path_style: PathStyle,
4313 project_id: ProjectId,
4314 client: AnyProtoClient,
4315 git_store: WeakEntity<GitStore>,
4316 cx: &mut Context<Self>,
4317 ) -> Self {
4318 let snapshot = RepositorySnapshot::empty(
4319 id,
4320 work_directory_abs_path,
4321 original_repo_abs_path,
4322 path_style,
4323 );
4324 let repository_state = RemoteRepositoryState { project_id, client };
4325 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
4326 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
4327 Self {
4328 this: cx.weak_entity(),
4329 snapshot,
4330 commit_message_buffer: None,
4331 git_store,
4332 pending_ops: Default::default(),
4333 paths_needing_status_update: Default::default(),
4334 job_sender,
4335 repository_state,
4336 askpass_delegates: Default::default(),
4337 latest_askpass_id: 0,
4338 active_jobs: Default::default(),
4339 job_id: 0,
4340 initial_graph_data: Default::default(),
4341 commit_data: Default::default(),
4342 commit_data_handler: CommitDataHandlerState::Closed,
4343 }
4344 }
4345
    /// Upgrades the weak handle to the owning [`GitStore`], if it is still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
4349
    /// Re-reads the index and HEAD contents for every open buffer that has a
    /// live diff against this repository, and applies any changed diff bases.
    ///
    /// Runs as a keyed job so rapid successive refreshes coalesce on the git
    /// worker. Only supported for local repositories.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Stage 1 (foreground): collect the current diff bases of every
                // buffer whose file belongs to this repository.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    // `None` for a base means "not tracked by
                                    // this buffer", so it is skipped below.
                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Stage 2 (background): load the new base texts and compute,
                // per buffer, which of the two bases (index/head) changed.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // Identical index and head collapse
                                            // into a single shared base text.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Stage 3 (foreground): apply the changes, forwarding each one
                // to any downstream (collab) client before updating locally.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4509
    /// Enqueues `job` on this repository's serialized git worker.
    ///
    /// When `status` is provided, the job is surfaced as an active job with
    /// that message while it runs. The returned receiver resolves with the
    /// job's output, or is canceled if the worker goes away.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4522
    /// Enqueues `job` on the git worker, optionally tagged with `key` so the
    /// worker can coalesce redundant work (e.g. repeated diff-base reloads).
    ///
    /// While the job runs, `status` (if any) is recorded in `active_jobs` so
    /// the UI can show progress. The returned receiver resolves with the
    /// job's output.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        // If the worker is gone, the send fails and `result_tx` is dropped,
        // which the caller observes as a canceled receiver.
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job as active for status reporting.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Deregister regardless of whether the job succeeded.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4572
4573 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4574 let Some(git_store) = self.git_store.upgrade() else {
4575 return;
4576 };
4577 let entity = cx.entity();
4578 git_store.update(cx, |git_store, cx| {
4579 let Some((&id, _)) = git_store
4580 .repositories
4581 .iter()
4582 .find(|(_, handle)| *handle == &entity)
4583 else {
4584 return;
4585 };
4586 git_store.active_repo_id = Some(id);
4587 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4588 });
4589 }
4590
    /// Iterates over the status entries from the cached snapshot.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4594
    /// Returns the diff statistics recorded for `path` in the cached snapshot.
    pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option<DiffStat> {
        self.snapshot.diff_stat_for_path(path)
    }
4598
4599 pub fn cached_stash(&self) -> GitStash {
4600 self.snapshot.stash_entries.clone()
4601 }
4602
4603 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4604 let git_store = self.git_store.upgrade()?;
4605 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4606 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4607 let abs_path = SanitizedPath::new(&abs_path);
4608 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4609 Some(ProjectPath {
4610 worktree_id: worktree.read(cx).id(),
4611 path: relative_path,
4612 })
4613 }
4614
4615 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
4616 let git_store = self.git_store.upgrade()?;
4617 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4618 let abs_path = worktree_store.absolutize(path, cx)?;
4619 self.snapshot.abs_path_to_repo_path(&abs_path)
4620 }
4621
4622 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4623 other
4624 .read(cx)
4625 .snapshot
4626 .work_directory_abs_path
4627 .starts_with(&self.snapshot.work_directory_abs_path)
4628 }
4629
    /// Returns the buffer used to compose commit messages for this
    /// repository, creating it on first use and caching it afterwards.
    ///
    /// Local repositories create the buffer directly; remote repositories
    /// request it from the host and wait for it to be replicated locally.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: the buffer was already created by an earlier call.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait until the host's buffer has been replicated here.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    // Cache the buffer for subsequent calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4682
4683 fn open_local_commit_buffer(
4684 language_registry: Option<Arc<LanguageRegistry>>,
4685 buffer_store: Entity<BufferStore>,
4686 cx: &mut Context<Self>,
4687 ) -> Task<Result<Entity<Buffer>>> {
4688 cx.spawn(async move |repository, cx| {
4689 let git_commit_language = match language_registry {
4690 Some(language_registry) => {
4691 Some(language_registry.language_for_name("Git Commit").await?)
4692 }
4693 None => None,
4694 };
4695 let buffer = buffer_store
4696 .update(cx, |buffer_store, cx| {
4697 buffer_store.create_buffer(git_commit_language, false, cx)
4698 })
4699 .await?;
4700
4701 repository.update(cx, |repository, _| {
4702 repository.commit_message_buffer = Some(buffer.clone());
4703 })?;
4704 Ok(buffer)
4705 })
4706 }
4707
    /// Restores `paths` to their contents at `commit`
    /// (i.e. `git checkout <commit> -- <paths>`).
    ///
    /// The affected paths are tracked as pending `Reverted` operations while
    /// the job runs. Remote repositories proxy the request to the host.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        // Shown as the active-job status message in the UI.
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4762
    /// Resets the current branch to `commit` using `reset_mode`.
    ///
    /// Local repositories invoke the backend directly; remote repositories
    /// proxy the request to the host, translating the mode into its proto
    /// representation.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4796
4797 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4798 let id = self.id;
4799 self.send_job(None, move |git_repo, _cx| async move {
4800 match git_repo {
4801 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4802 backend.show(commit).await
4803 }
4804 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4805 let resp = client
4806 .request(proto::GitShow {
4807 project_id: project_id.0,
4808 repository_id: id.to_proto(),
4809 commit,
4810 })
4811 .await?;
4812
4813 Ok(CommitDetails {
4814 sha: resp.sha.into(),
4815 message: resp.message.into(),
4816 commit_timestamp: resp.commit_timestamp,
4817 author_email: resp.author_email.into(),
4818 author_name: resp.author_name.into(),
4819 })
4820 }
4821 }
4822 })
4823 }
4824
    /// Loads the full diff of `commit`: old and new text for each changed
    /// file. Remote repositories fetch the diff from the host over proto.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        // Fails as a whole if any file path fails to decode.
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4860
    /// Loads the full commit history of `path` (no pagination).
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4867
    /// Loads the commit history of `path`, skipping the first `skip` entries
    /// and returning at most `limit` entries when a limit is provided.
    /// Remote repositories fetch the history from the host over proto.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        // The host echoes back the (possibly normalized) path.
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4909
    /// Returns the cached initial graph data for the given log source and
    /// order, if it has previously been requested via [`Self::graph_data`].
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4917
4918 pub fn search_commits(
4919 &mut self,
4920 log_source: LogSource,
4921 search_args: SearchCommitArgs,
4922 request_tx: smol::channel::Sender<Oid>,
4923 cx: &mut Context<Self>,
4924 ) {
4925 let repository_state = self.repository_state.clone();
4926
4927 cx.background_spawn(async move {
4928 let repo_state = repository_state.await;
4929
4930 match repo_state {
4931 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4932 backend
4933 .search_commits(log_source, search_args, request_tx)
4934 .await
4935 .log_err();
4936 }
4937 Ok(RepositoryState::Remote(_)) => {}
4938 Err(_) => {}
4939 };
4940 })
4941 .detach();
4942 }
4943
    /// Returns the commits in `range` for the requested log source and order,
    /// kicking off a background fetch on first access.
    ///
    /// The returned slice is clamped to the data loaded so far; `is_loading`
    /// reports whether the fetch task is still running.
    pub fn graph_data(
        &mut self,
        log_source: LogSource,
        log_order: LogOrder,
        range: Range<usize>,
        cx: &mut Context<Self>,
    ) -> GraphDataResponse<'_> {
        let initial_commit_data = self
            .initial_graph_data
            .entry((log_source.clone(), log_order))
            .or_insert_with(|| {
                let state = self.repository_state.clone();
                let log_source = log_source.clone();

                let fetch_task = cx.spawn(async move |repository, cx| {
                    let state = state.await;
                    let result = match state {
                        Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                            Self::local_git_graph_data(
                                repository.clone(),
                                backend,
                                log_source.clone(),
                                log_order,
                                cx,
                            )
                            .await
                        }
                        Ok(RepositoryState::Remote(_)) => {
                            Err("Git graph is not supported for collab yet".into())
                        }
                        Err(e) => Err(SharedString::from(e)),
                    };

                    // Record failures on the cached entry so the UI can show
                    // them; the entry must exist while this task is alive,
                    // since dropping the entry also drops the task.
                    if let Err(fetch_task_error) = result {
                        repository
                            .update(cx, |repository, _| {
                                if let Some(data) = repository
                                    .initial_graph_data
                                    .get_mut(&(log_source, log_order))
                                {
                                    data.error = Some(fetch_task_error);
                                } else {
                                    debug_panic!(
                                        "This task would be dropped if this entry doesn't exist"
                                    );
                                }
                            })
                            .ok();
                    }
                });

                InitialGitGraphData {
                    fetch_task,
                    error: None,
                    commit_data: Vec::new(),
                    commit_oid_to_index: HashMap::default(),
                }
            });

        // Clamp the requested range to the data that has loaded so far.
        let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
        let max_end = initial_commit_data.commit_data.len();

        GraphDataResponse {
            commits: &initial_commit_data.commit_data
                [range.start.min(max_start)..range.end.min(max_end)],
            is_loading: !initial_commit_data.fetch_task.is_ready(),
            error: initial_commit_data.error.clone(),
        }
    }
5013
    /// Streams initial commit-graph data from the local backend into this
    /// repository's cache, emitting a `GraphEvent` as each batch arrives.
    ///
    /// Returns an error string when the backend traversal fails.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        // The backend pushes batches of commit data through `request_tx` while
        // the loop below consumes them and appends to the cache.
        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        // Index each commit by sha before appending it, so the
                        // stored index matches its final position.
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);
                        }
                        cx.emit(RepositoryEvent::GraphEvent(
                            graph_data_key.clone(),
                            GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                        ));
                    });

                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
5067
    /// Returns the cached state for `sha`, scheduling a background load when
    /// the commit's data has not been requested yet.
    ///
    /// When `await_result` is true, a completion channel is attached so the
    /// caller can wait for the loaded data.
    pub fn fetch_commit_data(
        &mut self,
        sha: Oid,
        await_result: bool,
        cx: &mut Context<Self>,
    ) -> &CommitDataState {
        if self.commit_data.contains_key(&sha) {
            let data = &self.commit_data[&sha];

            // A load is already in flight but nobody is waiting on it yet;
            // attach a completion channel now that a caller wants to wait.
            if let CommitDataState::Loading(None) = data
                && await_result
            {
                let (tx, rx) = oneshot::channel();
                self.commit_data
                    .insert(sha, CommitDataState::Loading(Some(rx.shared())));

                let handler = self.get_handler(cx);
                handler.completion_senders.insert(sha, tx);
            }

            return &self.commit_data[&sha];
        }

        let (state, completer) = if await_result {
            let (tx, rx) = oneshot::channel();
            (CommitDataState::Loading(Some(rx.shared())), Some(tx))
        } else {
            (CommitDataState::Loading(None), None)
        };

        self.commit_data.insert(sha, state);

        let handler = self.get_handler(cx);
        if let Some(tx) = completer {
            handler.completion_senders.insert(sha, tx);
        }
        let mut has_failed = false;
        // Enqueue the load request; on failure (handler channel closed) roll
        // back the loading entry so a later call can retry.
        if handler.commit_data_request.try_send(sha).is_ok() {
            handler.pending_requests.insert(sha);
        } else {
            has_failed = true;
            handler.completion_senders.remove(&sha);
            debug_assert!(
                matches!(
                    self.commit_data.remove(&sha),
                    Some(CommitDataState::Loading(_))
                ),
                "Commit data should still be loading when enqueueing the request fails"
            );
        }

        // NOTE(review): after a failed enqueue the entry was removed above, so
        // this falls back to a placeholder `Loading(None)`; verify that the
        // leading `&` and the lifetime of the closure's fallback reference
        // compile as intended.
        &self.commit_data.get(&sha).unwrap_or_else(|| {
            debug_assert!(!has_failed, "This should always be inserted");
            &CommitDataState::Loading(None)
        })
    }
5124
5125 fn get_handler(&mut self, cx: &mut Context<Self>) -> &mut CommitDataHandler {
5126 if matches!(self.commit_data_handler, CommitDataHandlerState::Closed) {
5127 self.commit_data_handler =
5128 CommitDataHandlerState::Open(self.open_commit_data_handler(cx));
5129 }
5130
5131 match &mut self.commit_data_handler {
5132 CommitDataHandlerState::Open(handler) => handler,
5133 CommitDataHandlerState::Closed => unreachable!(),
5134 }
5135 }
5136
    /// Spawns the machinery that loads commit data on demand.
    ///
    /// A foreground task receives `(sha, data)` results, resolves any waiting
    /// completion senders, and caches the data; a background task services
    /// requests via the local or remote reader. When the result channel
    /// closes, the handler marks itself closed and forgets still-pending
    /// requests so they can be retried later.
    fn open_commit_data_handler(&self, cx: &Context<Self>) -> CommitDataHandler {
        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, CommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let data = Arc::new(commit_data);

                    if let CommitDataHandlerState::Open(handler) = &mut this.commit_data_handler {
                        handler.pending_requests.remove(&sha);
                        if let Some(completion_sender) = handler.completion_senders.remove(&sha) {
                            completion_sender.send(data.clone()).ok();
                        }
                    } else {
                        debug_panic!("The handler state has to be open for this task to exist");
                    }

                    let old_value = this.commit_data.insert(sha, CommitDataState::Loaded(data));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // The reader shut down: close the handler and drop cache entries
            // for requests that will never complete.
            this.update(cx, |this, _cx| {
                let CommitDataHandlerState::Open(handler) = std::mem::replace(
                    &mut this.commit_data_handler,
                    CommitDataHandlerState::Closed,
                ) else {
                    debug_panic!("The handler state has to be open for this task to exist");
                    return;
                };

                for sha in handler.pending_requests {
                    this.commit_data.remove(&sha);
                }
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let repository_id = self.id;
        let background_executor = cx.background_executor().clone();

        // Route requests to the appropriate reader once the state resolves.
        cx.background_spawn(async move {
            match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
                    Self::local_commit_data_reader(
                        backend,
                        request_rx,
                        result_tx,
                        background_executor,
                    )
                    .await;
                }
                Ok(RepositoryState::Remote(RemoteRepositoryState { project_id, client })) => {
                    Self::remote_commit_data_reader(
                        project_id,
                        client,
                        repository_id,
                        request_rx,
                        result_tx,
                        background_executor,
                    )
                    .await;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };
        })
        .detach();

        CommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
            completion_senders: HashMap::default(),
            pending_requests: HashSet::default(),
        }
    }
5226
    /// Services commit-data requests for a local repository.
    ///
    /// Reads `Oid`s from `request_rx`, loads each commit through the backend's
    /// commit-data reader, and forwards `(sha, data)` pairs on `result_tx`.
    /// The loop exits — closing `result_tx` so the foreground consumer can
    /// finish — when the request channel closes, when the result receiver is
    /// dropped, or after 10 seconds without a request (idle shutdown).
    async fn local_commit_data_reader(
        backend: Arc<dyn GitRepository>,
        request_rx: smol::channel::Receiver<Oid>,
        result_tx: smol::channel::Sender<(Oid, CommitData)>,
        background_executor: BackgroundExecutor,
    ) {
        let reader = match backend.commit_data_reader() {
            Ok(reader) => reader,
            Err(error) => {
                log::error!("failed to create commit data reader: {error:?}");
                return;
            }
        };

        loop {
            // Fresh idle timer each iteration; if it fires before a request
            // arrives, the reader shuts down rather than living forever.
            let timeout = background_executor.timer(std::time::Duration::from_secs(10));

            futures::select_biased! {
                sha = futures::FutureExt::fuse(request_rx.recv()) => {
                    let Ok(sha) = sha else {
                        break;
                    };

                    match reader.read(sha).await {
                        Ok(commit_data) => {
                            if result_tx.send((sha, commit_data)).await.is_err() {
                                break;
                            }
                        }
                        Err(error) => {
                            // A single unreadable commit is logged and skipped;
                            // it does not tear down the whole reader.
                            log::error!("failed to read commit data for {sha}: {error:?}");
                        }
                    }
                }
                _ = futures::FutureExt::fuse(timeout) => {
                    break;
                }
            }
        }

        drop(result_tx);
    }
5269
    /// Services commit-data requests for a remote (collab) repository.
    ///
    /// Incoming `Oid`s are batched into `GetCommitData` RPC requests by
    /// [`Self::get_next_request`]; multiple requests may be in flight at once
    /// via `response_futures`. Each decoded commit from a response is
    /// forwarded on `result_tx`. The loop exits once the request channel has
    /// closed (`accept_requests == false`) and all in-flight responses have
    /// drained, at which point `result_tx` is closed.
    async fn remote_commit_data_reader(
        project_id: ProjectId,
        client: AnyProtoClient,
        repository_id: RepositoryId,
        request_rx: smol::channel::Receiver<Oid>,
        result_tx: smol::channel::Sender<(Oid, CommitData)>,
        background_executor: BackgroundExecutor,
    ) {
        let mut response_futures =
            FuturesUnordered::<BoxFuture<'static, Result<proto::GetCommitDataResponse>>>::new();
        let mut accept_requests = true;
        let mut next_request = Self::get_next_request(
            project_id,
            client.clone(),
            repository_id,
            &request_rx,
            &background_executor,
        )
        .boxed()
        .fuse();

        loop {
            if !accept_requests && response_futures.is_empty() {
                break;
            }

            if response_futures.is_empty() {
                // Nothing in flight: block on the next batch so we don't
                // spin in the select below with an empty FuturesUnordered.
                match (&mut next_request).await {
                    NextCommitDataRequest::Request(request) => {
                        response_futures.push(request);
                        next_request = Self::get_next_request(
                            project_id,
                            client.clone(),
                            repository_id,
                            &request_rx,
                            &background_executor,
                        )
                        .boxed()
                        .fuse();
                    }
                    NextCommitDataRequest::Closed | NextCommitDataRequest::Idle => break,
                }
            }

            let next_response = response_futures.next().fuse();
            futures::pin_mut!(next_response);

            // Race accepting the next batch against completing an in-flight
            // response, preferring new requests so batches keep pipelining.
            futures::select_biased! {
                request = next_request => {
                    match request {
                        NextCommitDataRequest::Request(request) => {
                            response_futures.push(request);
                        }
                        NextCommitDataRequest::Idle => {}
                        NextCommitDataRequest::Closed => {
                            accept_requests = false;
                        }
                    }

                    if accept_requests {
                        next_request = Self::get_next_request(
                            project_id,
                            client.clone(),
                            repository_id,
                            &request_rx,
                            &background_executor,
                        )
                        .boxed()
                        .fuse();
                    }
                }
                result = next_response => {
                    let Some(result) = result else {
                        continue;
                    };

                    if let Ok(commit_data) = result {
                        for commit in commit_data.commits {
                            // Skip commits that fail to deserialize rather
                            // than aborting the whole stream.
                            let Ok(commit_data) = commit_data_from_proto(commit) else {
                                continue;
                            };

                            if result_tx
                                .send((commit_data.sha, commit_data))
                                .await
                                .is_err()
                            {
                                return;
                            }
                        }
                    }
                }
            }
        }

        drop(result_tx);
    }
5367
    /// Collects up to 64 queued `Oid`s — waiting at most 5ms between
    /// consecutive items — and turns them into a single `GetCommitData` RPC
    /// request future.
    ///
    /// Returns `Closed` when the channel is closed with nothing queued,
    /// `Idle` when the batching window elapsed with nothing queued, and
    /// `Request(future)` otherwise.
    async fn get_next_request(
        project_id: ProjectId,
        client: AnyProtoClient,
        repository_id: RepositoryId,
        request_rx: &smol::channel::Receiver<Oid>,
        background_executor: &BackgroundExecutor,
    ) -> NextCommitDataRequest {
        // 64 matches the result channel's bound at the call site.
        let mut queued_shas = Vec::with_capacity(64);

        loop {
            if queued_shas.len() >= 64 {
                break;
            }

            // Short per-item timeout: a gap in requests flushes the batch.
            let timeout = background_executor.timer(Duration::from_millis(5));

            futures::select_biased! {
                sha = futures::FutureExt::fuse(request_rx.recv()) => {
                    let Ok(sha) = sha else {
                        break;
                    };

                    queued_shas.push(sha);

                }
                _ = futures::FutureExt::fuse(timeout) => {
                    break;
                }
            }
        }

        if queued_shas.is_empty() && request_rx.is_closed() {
            NextCommitDataRequest::Closed
        } else if queued_shas.is_empty() {
            NextCommitDataRequest::Idle
        } else {
            NextCommitDataRequest::Request(
                client
                    .request(proto::GetCommitData {
                        project_id: project_id.to_proto(),
                        repository_id: repository_id.to_proto(),
                        shas: queued_shas.into_iter().map(|oid| oid.to_string()).collect(),
                    })
                    .boxed(),
            )
        }
    }
5415
5416 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
5417 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
5418 }
5419
    /// Spawns a save task for every open buffer among `entries` that has
    /// unsaved edits and whose backing file still exists on disk, returning
    /// the tasks so the caller can await them before writing the git index.
    ///
    /// Buffers with no open editor, no file, or no unsaved edits are skipped.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    // Paths that don't map into any worktree are ignored.
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
5446
    /// Stages the given paths (`git add <paths>`); saves dirty buffers first.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
5454
    /// Unstages the given paths (`git reset <paths>`); saves dirty buffers first.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
5462
    /// Shared implementation behind [`Self::stage_entries`] and
    /// [`Self::unstage_entries`].
    ///
    /// Flow: save dirty buffers for the affected paths, then run a keyed
    /// index-write job that (1) optimistically marks all hunks of each
    /// affected uncommitted diff as (un)staged, (2) performs the actual index
    /// write (locally or over RPC), and (3) on failure rolls the optimistic
    /// hunk state back. The job is keyed on the path set so concurrent writes
    /// to the same index entries are serialized.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable status line shown while the job runs.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                // Ensure on-disk content matches the buffers before the
                // index write reads it.
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip every hunk of each affected
                            // uncommitted diff, recording the per-diff
                            // operation count so a later write can tell
                            // whether it is still the most recent one.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // The actual index write, local or via RPC.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Commit or roll back the optimistic hunk state.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
5647
5648 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5649 let snapshot = self.snapshot.clone();
5650 let pending_ops = self.pending_ops.clone();
5651 let to_stage = cx.background_spawn(async move {
5652 snapshot
5653 .status()
5654 .filter_map(|entry| {
5655 if let Some(ops) =
5656 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5657 {
5658 if ops.staging() || ops.staged() {
5659 None
5660 } else {
5661 Some(entry.repo_path)
5662 }
5663 } else if entry.status.staging().is_fully_staged() {
5664 None
5665 } else {
5666 Some(entry.repo_path)
5667 }
5668 })
5669 .collect()
5670 });
5671
5672 cx.spawn(async move |this, cx| {
5673 let to_stage = to_stage.await;
5674 this.update(cx, |this, cx| {
5675 this.stage_or_unstage_entries(true, to_stage, cx)
5676 })?
5677 .await
5678 })
5679 }
5680
5681 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5682 let snapshot = self.snapshot.clone();
5683 let pending_ops = self.pending_ops.clone();
5684 let to_unstage = cx.background_spawn(async move {
5685 snapshot
5686 .status()
5687 .filter_map(|entry| {
5688 if let Some(ops) =
5689 pending_ops.get(&PathKey(entry.repo_path.as_ref().clone()), ())
5690 {
5691 if !ops.staging() && !ops.staged() {
5692 None
5693 } else {
5694 Some(entry.repo_path)
5695 }
5696 } else if entry.status.staging().is_fully_unstaged() {
5697 None
5698 } else {
5699 Some(entry.repo_path)
5700 }
5701 })
5702 .collect()
5703 });
5704
5705 cx.spawn(async move |this, cx| {
5706 let to_unstage = to_unstage.await;
5707 this.update(cx, |this, cx| {
5708 this.stage_or_unstage_entries(false, to_unstage, cx)
5709 })?
5710 .await
5711 })
5712 }
5713
    /// Stashes every path in the cached working-copy status.
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5719
    /// Stashes the given paths, locally via the backend or over RPC for a
    /// remote repository.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` for the job channel, second for the job's own result.
            .await??;
            Ok(())
        })
    }
5756
    /// Pops a stash entry (`git stash pop`); `index` selects a specific
    /// entry, `None` pops the most recent one.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` for the job channel, second for the job's own result.
            .await??;
            Ok(())
        })
    }
5790
    /// Applies a stash entry (`git stash apply`) without dropping it;
    /// `index` selects a specific entry, `None` applies the most recent one.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            // First `?` for the job channel, second for the job's own result.
            .await??;
            Ok(())
        })
    }
5824
    /// Appends `repo_path` to the `.gitignore` at the repository root,
    /// creating the file if it does not exist. Directories get a trailing
    /// `/`. No-op if an identical line is already present. Only supported
    /// for local repositories; remote repositories return an error.
    pub fn add_path_to_gitignore(
        &mut self,
        repo_path: &RepoPath,
        is_dir: bool,
    ) -> oneshot::Receiver<Result<()>> {
        let work_dir = self.snapshot.work_directory_abs_path.clone();
        // Always written with POSIX separators — gitignore patterns use `/`.
        let path_display = repo_path.as_ref().display(PathStyle::Posix);
        let file_path_str = if is_dir {
            format!("{}/", path_display)
        } else {
            path_display.to_string()
        };

        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { fs, .. }) => {
                    let gitignore_path = work_dir.join(".gitignore");

                    // Missing file is treated as empty rather than an error.
                    let existing_content = fs.load(&gitignore_path).await.unwrap_or_default();

                    if existing_content
                        .lines()
                        .any(|line| line.trim() == file_path_str)
                    {
                        return Ok(());
                    }

                    // Keep the file newline-terminated regardless of its
                    // prior state.
                    let new_content = if existing_content.is_empty() {
                        format!("{}\n", file_path_str)
                    } else if existing_content.ends_with('\n') {
                        format!("{}{}\n", existing_content, file_path_str)
                    } else {
                        format!("{}\n{}\n", existing_content, file_path_str)
                    };

                    fs.save(
                        &gitignore_path,
                        &text::Rope::from(new_content.as_str()),
                        text::LineEnding::Unix,
                    )
                    .await
                }
                RepositoryState::Remote(_) => Err(anyhow::anyhow!(
                    "Cannot modify .gitignore on remote repository"
                )),
            }
        })
    }
5873
5874 pub fn stash_drop(
5875 &mut self,
5876 index: Option<usize>,
5877 cx: &mut Context<Self>,
5878 ) -> oneshot::Receiver<anyhow::Result<()>> {
5879 let id = self.id;
5880 let updates_tx = self
5881 .git_store()
5882 .and_then(|git_store| match &git_store.read(cx).state {
5883 GitStoreState::Local { downstream, .. } => downstream
5884 .as_ref()
5885 .map(|downstream| downstream.updates_tx.clone()),
5886 _ => None,
5887 });
5888 let this = cx.weak_entity();
5889 self.send_job(None, move |git_repo, mut cx| async move {
5890 match git_repo {
5891 RepositoryState::Local(LocalRepositoryState {
5892 backend,
5893 environment,
5894 ..
5895 }) => {
5896 // TODO would be nice to not have to do this manually
5897 let result = backend.stash_drop(index, environment).await;
5898 if result.is_ok()
5899 && let Ok(stash_entries) = backend.stash_entries().await
5900 {
5901 let snapshot = this.update(&mut cx, |this, cx| {
5902 this.snapshot.stash_entries = stash_entries;
5903 cx.emit(RepositoryEvent::StashEntriesChanged);
5904 this.snapshot.clone()
5905 })?;
5906 if let Some(updates_tx) = updates_tx {
5907 updates_tx
5908 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5909 .ok();
5910 }
5911 }
5912
5913 result
5914 }
5915 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5916 client
5917 .request(proto::StashDrop {
5918 project_id: project_id.0,
5919 repository_id: id.to_proto(),
5920 stash_index: index.map(|i| i as u64),
5921 })
5922 .await
5923 .context("sending stash pop request")?;
5924 Ok(())
5925 }
5926 }
5927 })
5928 }
5929
    /// Runs a git hook (e.g. pre-commit), locally via the backend or over
    /// RPC for a remote repository. The receiver resolves with the hook's
    /// success or failure.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5956
    /// Creates a commit with the given message and options.
    ///
    /// The pre-commit hook is enqueued first and awaited inside the commit
    /// job, so a failing hook aborts the commit. For remote repositories the
    /// askpass delegate is registered under a fresh id for the duration of
    /// the RPC so the host can relay credential prompts back to us.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook ahead of the commit job itself.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Hook must succeed (and not be canceled) before committing.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate when the request finishes,
                    // success or failure.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                                allow_empty: options.allow_empty,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6012
    /// Runs `git fetch` with the given options, resolving with the command's
    /// stdout/stderr. For remote repositories the askpass delegate is
    /// registered for the duration of the RPC so credential prompts can be
    /// relayed.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate when the request finishes.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
6054
    /// Pushes `branch` to `remote_branch` on `remote`, resolving with the
    /// command's stdout/stderr.
    ///
    /// After a successful local push, the branch list is re-read so the
    /// snapshot's head branch (and its upstream tracking info) reflects the
    /// push, and the updated snapshot is forwarded to any downstream
    /// collaborators.
    pub fn push(
        &mut self,
        branch: SharedString,
        remote_branch: SharedString,
        remote: SharedString,
        options: Option<PushOptions>,
        askpass: AskPassDelegate,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Extra flag shown in the status line only; the backend receives
        // the structured `options` value.
        let args = options
            .map(|option| match option {
                PushOptions::SetUpstream => " --set-upstream",
                PushOptions::Force => " --force-with-lease",
            })
            .unwrap_or("");

        // Only a local host with an active downstream has an updates channel.
        let updates_tx = self
            .git_store()
            .and_then(|git_store| match &git_store.read(cx).state {
                GitStoreState::Local { downstream, .. } => downstream
                    .as_ref()
                    .map(|downstream| downstream.updates_tx.clone()),
                _ => None,
            });

        let this = cx.weak_entity();
        self.send_job(
            Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
            move |git_repo, mut cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => {
                        let result = backend
                            .push(
                                branch.to_string(),
                                remote_branch.to_string(),
                                remote.to_string(),
                                options,
                                askpass,
                                environment.clone(),
                                cx.clone(),
                            )
                            .await;
                        // TODO would be nice to not have to do this manually
                        if result.is_ok() {
                            let branches = backend.branches().await?;
                            let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after scan is {branch:?}");
                            let snapshot = this.update(&mut cx, |this, cx| {
                                this.snapshot.branch = branch;
                                cx.emit(RepositoryEvent::HeadChanged);
                                this.snapshot.clone()
                            })?;
                            if let Some(updates_tx) = updates_tx {
                                updates_tx
                                    .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                                    .ok();
                            }
                        }
                        result
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        // Deregister the delegate when the request finishes.
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Push {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_branch_name: remote_branch.to_string(),
                                remote_name: remote.to_string(),
                                options: options.map(|options| match options {
                                    PushOptions::Force => proto::push::PushOptions::Force,
                                    PushOptions::SetUpstream => {
                                        proto::push::PushOptions::SetUpstream
                                    }
                                }
                                    as i32),
                            })
                            .await?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }
6156
    /// Runs `git pull` (optionally `--rebase`) from `remote`, resolving with
    /// the command's stdout/stderr. `branch: None` pulls the current
    /// branch's configured upstream.
    pub fn pull(
        &mut self,
        branch: Option<SharedString>,
        remote: SharedString,
        rebase: bool,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Build the human-readable status line shown while the job runs.
        let mut status = "git pull".to_string();
        if rebase {
            status.push_str(" --rebase");
        }
        status.push_str(&format!(" {}", remote));
        if let Some(b) = &branch {
            status.push_str(&format!(" {}", b));
        }

        self.send_job(Some(status.into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .pull(
                            branch.as_ref().map(|b| b.to_string()),
                            remote.to_string(),
                            rebase,
                            askpass,
                            environment.clone(),
                            cx,
                        )
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Deregister the delegate when the request finishes.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let response = client
                        .request(proto::Pull {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            rebase,
                            branch_name: branch.as_ref().map(|b| b.to_string()),
                            remote_name: remote.to_string(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
6221
    /// Writes `content` as the index text for `path` (`None` removes the
    /// entry), via a job keyed on the path so concurrent index writes for
    /// the same file are serialized.
    ///
    /// If `hunk_staging_operation_count` is provided, the corresponding
    /// diff state's `hunk_staging_operation_count_as_of_write` is updated
    /// after the write so the diff can tell whether it reflects the latest
    /// index contents.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the on-disk executable bit in the index
                        // entry; a missing or unreadable file counts as
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // NOTE(review): the executable bit is not sent here —
                        // presumably the host determines it; confirm.
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
6300
    /// Adds a new remote (`git remote add <name> <url>`), locally or via RPC.
    pub fn create_remote(
        &mut self,
        remote_name: String,
        remote_url: String,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git remote add {remote_name} {remote_url}").into()),
            move |repo, _cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.create_remote(remote_name, remote_url).await
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitCreateRemote {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                remote_name,
                                remote_url,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6330
6331 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
6332 let id = self.id;
6333 self.send_job(
6334 Some(format!("git remove remote {remote_name}").into()),
6335 move |repo, _cx| async move {
6336 match repo {
6337 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6338 backend.remove_remote(remote_name).await
6339 }
6340 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6341 client
6342 .request(proto::GitRemoveRemote {
6343 project_id: project_id.0,
6344 repository_id: id.to_proto(),
6345 remote_name,
6346 })
6347 .await?;
6348
6349 Ok(())
6350 }
6351 }
6352 },
6353 )
6354 }
6355
    /// Resolves the remotes relevant to `branch_name`.
    ///
    /// Locally: prefers the branch's configured push remote (when `is_push`)
    /// or fetch remote, falling back to listing all remotes when the branch
    /// has none configured or no branch is given. Remotely: asks the host,
    /// which applies the same logic.
    pub fn get_remotes(
        &mut self,
        branch_name: Option<String>,
        is_push: bool,
    ) -> oneshot::Receiver<Result<Vec<Remote>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let remote = if let Some(branch_name) = branch_name {
                        if is_push {
                            backend.get_push_remote(branch_name).await?
                        } else {
                            backend.get_branch_remote(branch_name).await?
                        }
                    } else {
                        None
                    };

                    match remote {
                        Some(remote) => Ok(vec![remote]),
                        None => backend.get_all_remotes().await,
                    }
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GetRemotes {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                            is_push,
                        })
                        .await?;

                    let remotes = response
                        .remotes
                        .into_iter()
                        .map(|remotes| Remote {
                            name: remotes.name.into(),
                        })
                        .collect();

                    Ok(remotes)
                }
            }
        })
    }
6403
    /// Lists the repository's branches, locally via the backend or via RPC.
    pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
        let id = self.id;
        self.send_job(None, move |repo, _| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.branches().await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::GitGetBranches {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|branch| proto_to_branch(&branch))
                        .collect();

                    Ok(branches)
                }
            }
        })
    }
6430
    /// If this is a linked worktree (*NOT* the main checkout of a repository),
    /// returns the path of the linked worktree.
    ///
    /// Returns None if this is the main checkout.
    pub fn linked_worktree_path(&self) -> Option<&Arc<Path>> {
        // A linked worktree's working directory differs from the repository's
        // original checkout path; equality means this *is* the main checkout.
        if self.work_directory_abs_path != self.original_repo_abs_path {
            Some(&self.work_directory_abs_path)
        } else {
            None
        }
    }
6442
6443 pub fn path_for_new_linked_worktree(
6444 &self,
6445 branch_name: &str,
6446 worktree_directory_setting: &str,
6447 ) -> Result<PathBuf> {
6448 let original_repo = self.original_repo_abs_path.clone();
6449 let project_name = original_repo
6450 .file_name()
6451 .ok_or_else(|| anyhow!("git repo must have a directory name"))?;
6452 let directory = worktrees_directory_for_repo(&original_repo, worktree_directory_setting)?;
6453 Ok(directory.join(branch_name).join(project_name))
6454 }
6455
6456 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
6457 let id = self.id;
6458 self.send_job(None, move |repo, _| async move {
6459 match repo {
6460 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6461 backend.worktrees().await
6462 }
6463 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6464 let response = client
6465 .request(proto::GitGetWorktrees {
6466 project_id: project_id.0,
6467 repository_id: id.to_proto(),
6468 })
6469 .await?;
6470
6471 let worktrees = response
6472 .worktrees
6473 .into_iter()
6474 .map(|worktree| proto_to_worktree(&worktree))
6475 .collect();
6476
6477 Ok(worktrees)
6478 }
6479 }
6480 })
6481 }
6482
    /// Creates a new git worktree at `path` for the given `target`
    /// (an existing branch, a new branch, or a detached commit).
    pub fn create_worktree(
        &mut self,
        target: CreateWorktreeTarget,
        path: PathBuf,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        // Human-readable status line shown while the job is in flight.
        let job_description = match target.branch_name() {
            Some(branch_name) => format!("git worktree add: {branch_name}"),
            None => "git worktree add (detached)".to_string(),
        };
        self.send_job(Some(job_description.into()), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_worktree(target, path).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // Flatten the target enum into the RPC's flat fields:
                    // (branch name, base commit, whether the branch exists).
                    let (name, commit, use_existing_branch) = match target {
                        CreateWorktreeTarget::ExistingBranch { branch_name } => {
                            (Some(branch_name), None, true)
                        }
                        CreateWorktreeTarget::NewBranch {
                            branch_name,
                            base_sha,
                        } => (Some(branch_name), base_sha, false),
                        CreateWorktreeTarget::Detached { base_sha } => (None, base_sha, false),
                    };

                    client
                        .request(proto::GitCreateWorktree {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            // Detached worktrees carry no branch name.
                            name: name.unwrap_or_default(),
                            directory: path.to_string_lossy().to_string(),
                            commit,
                            use_existing_branch,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6526
6527 pub fn create_worktree_detached(
6528 &mut self,
6529 path: PathBuf,
6530 commit: String,
6531 ) -> oneshot::Receiver<Result<()>> {
6532 self.create_worktree(
6533 CreateWorktreeTarget::Detached {
6534 base_sha: Some(commit),
6535 },
6536 path,
6537 )
6538 }
6539
    /// Checks out `branch_name` inside the worktree at `worktree_path`,
    /// creating the branch first (`git checkout -b`) when `create` is true.
    pub fn checkout_branch_in_worktree(
        &mut self,
        branch_name: String,
        worktree_path: PathBuf,
        create: bool,
    ) -> oneshot::Receiver<Result<()>> {
        let description = if create {
            format!("git checkout -b {branch_name}")
        } else {
            format!("git checkout {branch_name}")
        };
        self.send_job(Some(description.into()), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend
                        .checkout_branch_in_worktree(branch_name, worktree_path, create)
                        .await
                }
                RepositoryState::Remote(_) => {
                    // NOTE(review): for remote repositories this is a silent
                    // no-op that still reports success — confirm callers can
                    // tolerate that, or consider returning an error instead.
                    log::warn!("checkout_branch_in_worktree not supported for remote repositories");
                    Ok(())
                }
            }
        })
    }
6565
6566 pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
6567 let id = self.id;
6568 self.send_job(None, move |repo, _cx| async move {
6569 match repo {
6570 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6571 Ok(backend.head_sha().await)
6572 }
6573 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6574 let response = client
6575 .request(proto::GitGetHeadSha {
6576 project_id: project_id.0,
6577 repository_id: id.to_proto(),
6578 })
6579 .await?;
6580
6581 Ok(response.sha)
6582 }
6583 }
6584 })
6585 }
6586
6587 fn edit_ref(
6588 &mut self,
6589 ref_name: String,
6590 commit: Option<String>,
6591 ) -> oneshot::Receiver<Result<()>> {
6592 let id = self.id;
6593 self.send_job(None, move |repo, _cx| async move {
6594 match repo {
6595 RepositoryState::Local(LocalRepositoryState { backend, .. }) => match commit {
6596 Some(commit) => backend.update_ref(ref_name, commit).await,
6597 None => backend.delete_ref(ref_name).await,
6598 },
6599 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6600 let action = match commit {
6601 Some(sha) => proto::git_edit_ref::Action::UpdateToCommit(sha),
6602 None => {
6603 proto::git_edit_ref::Action::Delete(proto::git_edit_ref::DeleteRef {})
6604 }
6605 };
6606 client
6607 .request(proto::GitEditRef {
6608 project_id: project_id.0,
6609 repository_id: id.to_proto(),
6610 ref_name,
6611 action: Some(action),
6612 })
6613 .await?;
6614 Ok(())
6615 }
6616 }
6617 })
6618 }
6619
6620 pub fn update_ref(
6621 &mut self,
6622 ref_name: String,
6623 commit: String,
6624 ) -> oneshot::Receiver<Result<()>> {
6625 self.edit_ref(ref_name, Some(commit))
6626 }
6627
6628 pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
6629 self.edit_ref(ref_name, None)
6630 }
6631
6632 pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
6633 let id = self.id;
6634 self.send_job(None, move |repo, _cx| async move {
6635 match repo {
6636 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6637 backend.repair_worktrees().await
6638 }
6639 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6640 client
6641 .request(proto::GitRepairWorktrees {
6642 project_id: project_id.0,
6643 repository_id: id.to_proto(),
6644 })
6645 .await?;
6646 Ok(())
6647 }
6648 }
6649 })
6650 }
6651
6652 pub fn create_archive_checkpoint(&mut self) -> oneshot::Receiver<Result<(String, String)>> {
6653 let id = self.id;
6654 self.send_job(None, move |repo, _cx| async move {
6655 match repo {
6656 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6657 backend.create_archive_checkpoint().await
6658 }
6659 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6660 let response = client
6661 .request(proto::GitCreateArchiveCheckpoint {
6662 project_id: project_id.0,
6663 repository_id: id.to_proto(),
6664 })
6665 .await?;
6666 Ok((response.staged_commit_sha, response.unstaged_commit_sha))
6667 }
6668 }
6669 })
6670 }
6671
6672 pub fn restore_archive_checkpoint(
6673 &mut self,
6674 staged_sha: String,
6675 unstaged_sha: String,
6676 ) -> oneshot::Receiver<Result<()>> {
6677 let id = self.id;
6678 self.send_job(None, move |repo, _cx| async move {
6679 match repo {
6680 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6681 backend
6682 .restore_archive_checkpoint(staged_sha, unstaged_sha)
6683 .await
6684 }
6685 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6686 client
6687 .request(proto::GitRestoreArchiveCheckpoint {
6688 project_id: project_id.0,
6689 repository_id: id.to_proto(),
6690 staged_commit_sha: staged_sha,
6691 unstaged_commit_sha: unstaged_sha,
6692 })
6693 .await?;
6694 Ok(())
6695 }
6696 }
6697 })
6698 }
6699
    /// Removes the linked worktree at `path` (`git worktree remove`), deleting
    /// the directory first when `force` is set and pruning empty ancestor
    /// directories under the managed worktree base afterwards.
    pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let original_repo_abs_path = self.snapshot.original_repo_abs_path.clone();
        self.send_job(
            Some(format!("git worktree remove: {}", path.display()).into()),
            move |repo, cx| async move {
                match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, fs, .. }) => {
                        // When forcing, delete the worktree directory ourselves before
                        // invoking git. `git worktree remove` can remove the admin
                        // metadata in `.git/worktrees/<name>` but fail to delete the
                        // working directory (it continues past directory-removal errors),
                        // leaving an orphaned folder on disk. Deleting first guarantees
                        // the directory is gone, and `git worktree remove --force`
                        // tolerates a missing working tree while cleaning up the admin
                        // entry. We keep this inside the `Local` arm so that for remote
                        // projects the deletion runs on the remote machine (where the
                        // `GitRemoveWorktree` RPC is handled against the local repo on
                        // the headless server) using its own filesystem.
                        //
                        // After a successful removal, also delete any empty ancestor
                        // directories between the worktree path and the configured
                        // base directory used when creating linked worktrees.
                        //
                        // Non-force removals are left untouched before git runs:
                        // `git worktree remove` must see the dirty working tree to
                        // refuse the operation.
                        if force {
                            fs.remove_dir(
                                &path,
                                RemoveOptions {
                                    recursive: true,
                                    ignore_if_not_exists: true,
                                },
                            )
                            .await
                            .with_context(|| {
                                format!("failed to delete worktree directory '{}'", path.display())
                            })?;
                        }

                        backend.remove_worktree(path.clone(), force).await?;

                        // Resolve the configured base directory for managed
                        // worktrees; `None` means the setting couldn't be
                        // resolved (already logged), so skip ancestor cleanup.
                        let managed_worktree_base = cx.update(|cx| {
                            let setting = &ProjectSettings::get_global(cx).git.worktree_directory;
                            worktrees_directory_for_repo(&original_repo_abs_path, setting).log_err()
                        });

                        if let Some(managed_worktree_base) = managed_worktree_base {
                            remove_empty_managed_worktree_ancestors(
                                fs.as_ref(),
                                &path,
                                &managed_worktree_base,
                            )
                            .await;
                        }

                        Ok(())
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::GitRemoveWorktree {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_string_lossy().to_string(),
                                force,
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
6775
6776 pub fn rename_worktree(
6777 &mut self,
6778 old_path: PathBuf,
6779 new_path: PathBuf,
6780 ) -> oneshot::Receiver<Result<()>> {
6781 let id = self.id;
6782 self.send_job(
6783 Some(format!("git worktree move: {}", old_path.display()).into()),
6784 move |repo, _cx| async move {
6785 match repo {
6786 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6787 backend.rename_worktree(old_path, new_path).await
6788 }
6789 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6790 client
6791 .request(proto::GitRenameWorktree {
6792 project_id: project_id.0,
6793 repository_id: id.to_proto(),
6794 old_path: old_path.to_string_lossy().to_string(),
6795 new_path: new_path.to_string_lossy().to_string(),
6796 })
6797 .await?;
6798
6799 Ok(())
6800 }
6801 }
6802 },
6803 )
6804 }
6805
6806 pub fn default_branch(
6807 &mut self,
6808 include_remote_name: bool,
6809 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
6810 let id = self.id;
6811 self.send_job(None, move |repo, _| async move {
6812 match repo {
6813 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6814 backend.default_branch(include_remote_name).await
6815 }
6816 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6817 let response = client
6818 .request(proto::GetDefaultBranch {
6819 project_id: project_id.0,
6820 repository_id: id.to_proto(),
6821 })
6822 .await?;
6823
6824 anyhow::Ok(response.branch.map(SharedString::from))
6825 }
6826 }
6827 })
6828 }
6829
    /// Computes a tree-level diff (per-path added/modified/deleted statuses)
    /// for the given `diff_type`.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Convert proto entries to `TreeDiffStatus`, silently
                    // dropping (but logging) entries with missing/invalid
                    // oids or paths rather than failing the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    // `old` is the pre-change blob oid.
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
6889
6890 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
6891 let id = self.id;
6892 self.send_job(None, move |repo, _cx| async move {
6893 match repo {
6894 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6895 backend.diff(diff_type).await
6896 }
6897 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6898 let (proto_diff_type, merge_base_ref) = match &diff_type {
6899 DiffType::HeadToIndex => {
6900 (proto::git_diff::DiffType::HeadToIndex.into(), None)
6901 }
6902 DiffType::HeadToWorktree => {
6903 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
6904 }
6905 DiffType::MergeBase { base_ref } => (
6906 proto::git_diff::DiffType::MergeBase.into(),
6907 Some(base_ref.to_string()),
6908 ),
6909 };
6910 let response = client
6911 .request(proto::GitDiff {
6912 project_id: project_id.0,
6913 repository_id: id.to_proto(),
6914 diff_type: proto_diff_type,
6915 merge_base_ref,
6916 })
6917 .await?;
6918
6919 Ok(response.diff)
6920 }
6921 }
6922 })
6923 }
6924
    /// Creates a new branch named `branch_name` (`git switch -c`), optionally
    /// starting from `base_branch`.
    pub fn create_branch(
        &mut self,
        branch_name: String,
        base_branch: Option<String>,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let status_msg = if let Some(ref base) = base_branch {
            format!("git switch -c {branch_name} {base}").into()
        } else {
            format!("git switch -c {branch_name}").into()
        };
        self.send_job(Some(status_msg), move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.create_branch(branch_name, base_branch).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    // NOTE(review): `base_branch` is dropped here — the RPC
                    // only carries `branch_name`, so remote branch creation
                    // ignores the requested base. Confirm this is intended.
                    client
                        .request(proto::GitCreateBranch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            branch_name,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
6955
6956 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6957 let id = self.id;
6958 self.send_job(
6959 Some(format!("git switch {branch_name}").into()),
6960 move |repo, _cx| async move {
6961 match repo {
6962 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6963 backend.change_branch(branch_name).await
6964 }
6965 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6966 client
6967 .request(proto::GitChangeBranch {
6968 project_id: project_id.0,
6969 repository_id: id.to_proto(),
6970 branch_name,
6971 })
6972 .await?;
6973
6974 Ok(())
6975 }
6976 }
6977 },
6978 )
6979 }
6980
6981 pub fn delete_branch(
6982 &mut self,
6983 is_remote: bool,
6984 branch_name: String,
6985 ) -> oneshot::Receiver<Result<()>> {
6986 let id = self.id;
6987 self.send_job(
6988 Some(
6989 format!(
6990 "git branch {} {}",
6991 if is_remote { "-dr" } else { "-d" },
6992 branch_name
6993 )
6994 .into(),
6995 ),
6996 move |repo, _cx| async move {
6997 match repo {
6998 RepositoryState::Local(state) => {
6999 state.backend.delete_branch(is_remote, branch_name).await
7000 }
7001 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7002 client
7003 .request(proto::GitDeleteBranch {
7004 project_id: project_id.0,
7005 repository_id: id.to_proto(),
7006 is_remote,
7007 branch_name,
7008 })
7009 .await?;
7010
7011 Ok(())
7012 }
7013 }
7014 },
7015 )
7016 }
7017
7018 pub fn rename_branch(
7019 &mut self,
7020 branch: String,
7021 new_name: String,
7022 ) -> oneshot::Receiver<Result<()>> {
7023 let id = self.id;
7024 self.send_job(
7025 Some(format!("git branch -m {branch} {new_name}").into()),
7026 move |repo, _cx| async move {
7027 match repo {
7028 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7029 backend.rename_branch(branch, new_name).await
7030 }
7031 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7032 client
7033 .request(proto::GitRenameBranch {
7034 project_id: project_id.0,
7035 repository_id: id.to_proto(),
7036 branch,
7037 new_name,
7038 })
7039 .await?;
7040
7041 Ok(())
7042 }
7043 }
7044 },
7045 )
7046 }
7047
7048 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
7049 let id = self.id;
7050 self.send_job(None, move |repo, _cx| async move {
7051 match repo {
7052 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7053 backend.check_for_pushed_commit().await
7054 }
7055 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7056 let response = client
7057 .request(proto::CheckForPushedCommits {
7058 project_id: project_id.0,
7059 repository_id: id.to_proto(),
7060 })
7061 .await?;
7062
7063 let branches = response.pushed_to.into_iter().map(Into::into).collect();
7064
7065 Ok(branches)
7066 }
7067 }
7068 })
7069 }
7070
7071 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
7072 let id = self.id;
7073 self.send_job(None, move |repo, _cx| async move {
7074 match repo {
7075 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7076 backend.checkpoint().await
7077 }
7078 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7079 let response = client
7080 .request(proto::GitCreateCheckpoint {
7081 project_id: project_id.0,
7082 repository_id: id.to_proto(),
7083 })
7084 .await?;
7085
7086 Ok(GitRepositoryCheckpoint {
7087 commit_sha: Oid::from_bytes(&response.commit_sha)?,
7088 })
7089 }
7090 }
7091 })
7092 }
7093
7094 pub fn restore_checkpoint(
7095 &mut self,
7096 checkpoint: GitRepositoryCheckpoint,
7097 ) -> oneshot::Receiver<Result<()>> {
7098 let id = self.id;
7099 self.send_job(None, move |repo, _cx| async move {
7100 match repo {
7101 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7102 backend.restore_checkpoint(checkpoint).await
7103 }
7104 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7105 client
7106 .request(proto::GitRestoreCheckpoint {
7107 project_id: project_id.0,
7108 repository_id: id.to_proto(),
7109 commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
7110 })
7111 .await?;
7112 Ok(())
7113 }
7114 }
7115 })
7116 }
7117
    /// Applies a `proto::UpdateRepository` message from the upstream project
    /// to this replica's snapshot, emitting the appropriate repository events
    /// for each piece of state that actually changed.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Head/branch: compare before overwriting so we only emit when
        // something actually changed.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::HeadChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // The branch list only arrives with the final message of an update
        // batch.
        if update.is_last_update {
            let new_branch_list: Arc<[Branch]> =
                update.branch_list.iter().map(proto_to_branch).collect();
            if *self.snapshot.branch_list != *new_branch_list {
                cx.emit(RepositoryEvent::BranchListChanged);
            }
            self.snapshot.branch_list = new_branch_list;
        }

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries: unparseable proto entries are skipped rather than
        // failing the whole update.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        let new_linked_worktrees: Arc<[GitWorktree]> = update
            .linked_worktrees
            .iter()
            .map(proto_to_worktree)
            .collect();
        if *self.snapshot.linked_worktrees != *new_linked_worktrees {
            cx.emit(RepositoryEvent::GitWorktreeListChanged);
        }
        self.snapshot.linked_worktrees = new_linked_worktrees;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Apply removed + updated file statuses as a single batched edit on
        // the status sum-tree; entries that fail to deserialize are logged
        // and dropped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());

        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
7210
7211 pub fn compare_checkpoints(
7212 &mut self,
7213 left: GitRepositoryCheckpoint,
7214 right: GitRepositoryCheckpoint,
7215 ) -> oneshot::Receiver<Result<bool>> {
7216 let id = self.id;
7217 self.send_job(None, move |repo, _cx| async move {
7218 match repo {
7219 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7220 backend.compare_checkpoints(left, right).await
7221 }
7222 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7223 let response = client
7224 .request(proto::GitCompareCheckpoints {
7225 project_id: project_id.0,
7226 repository_id: id.to_proto(),
7227 left_commit_sha: left.commit_sha.as_bytes().to_vec(),
7228 right_commit_sha: right.commit_sha.as_bytes().to_vec(),
7229 })
7230 .await?;
7231 Ok(response.equal)
7232 }
7233 }
7234 })
7235 }
7236
7237 pub fn diff_checkpoints(
7238 &mut self,
7239 base_checkpoint: GitRepositoryCheckpoint,
7240 target_checkpoint: GitRepositoryCheckpoint,
7241 ) -> oneshot::Receiver<Result<String>> {
7242 let id = self.id;
7243 self.send_job(None, move |repo, _cx| async move {
7244 match repo {
7245 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7246 backend
7247 .diff_checkpoints(base_checkpoint, target_checkpoint)
7248 .await
7249 }
7250 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7251 let response = client
7252 .request(proto::GitDiffCheckpoints {
7253 project_id: project_id.0,
7254 repository_id: id.to_proto(),
7255 base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
7256 target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
7257 })
7258 .await?;
7259 Ok(response.diff)
7260 }
7261 }
7262 })
7263 }
7264
    /// Drops all pending ops that are no longer running, keeping only the
    /// in-flight ones, and emits `PendingOpsChanged` when anything was
    /// removed.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        // Rebuild the sum-tree keeping only ops still marked as running;
        // paths whose op list becomes empty are dropped entirely.
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event carries the *pre-clear* op set (the
            // field is read before `self.pending_ops` is replaced below) —
            // confirm listeners expect the old state rather than `updated`.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
7290
    /// Queues a full git status rescan on the repository's job worker.
    ///
    /// The scan is keyed with `GitJobKey::ReloadGitState`, so rapid-fire
    /// requests collapse: the worker skips a queued scan when a newer one
    /// with the same key is already waiting behind it. The resulting
    /// snapshot is optionally forwarded downstream via `updates_tx`.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been dropped while the job
                // was queued; in that case there is nothing to scan.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scans only make sense against a local backend.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                let snapshot = compute_snapshot(this.clone(), backend.clone(), &mut cx).await?;
                this.update(&mut cx, |this, cx| {
                    this.clear_pending_ops(cx);
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
7322
    /// Spawns the background worker task that executes git jobs against a
    /// local repository, returning the channel used to enqueue jobs.
    ///
    /// The worker first awaits the (shared) repository state, registers any
    /// additional git hosting providers for the backend, then loops: it
    /// drains all currently queued jobs, and when a dequeued job carries a
    /// key that also appears later in the queue, the older job is skipped —
    /// i.e. only the most recent job per key actually runs.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Pull in everything currently queued without blocking.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Coalesce keyed jobs: if a newer job with the same key is
                    // waiting behind this one, drop this one unrun.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue is empty: block until the next job arrives, or
                    // exit when all senders are dropped.
                    jobs.push_back(job);
                } else {
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
7368
    /// Spawns the background worker task that executes git jobs against a
    /// remote repository, returning the channel used to enqueue jobs.
    ///
    /// The job loop is identical to `spawn_local_git_worker` (drain the
    /// queue, skip a keyed job when a newer job with the same key is queued
    /// behind it), minus the deferred-state await and hosting-provider
    /// registration. Keep the two loops in sync when modifying either.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Pull in everything currently queued without blocking.
                while let Ok(next_job) = job_rx.try_recv() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Coalesce keyed jobs: only the most recent job for a
                    // given key runs.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // All senders dropped: shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
7404
7405 fn load_staged_text(
7406 &mut self,
7407 buffer_id: BufferId,
7408 repo_path: RepoPath,
7409 cx: &App,
7410 ) -> Task<Result<Option<String>>> {
7411 let rx = self.send_job(None, move |state, _| async move {
7412 match state {
7413 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7414 anyhow::Ok(backend.load_index_text(repo_path).await)
7415 }
7416 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
7417 let response = client
7418 .request(proto::OpenUnstagedDiff {
7419 project_id: project_id.to_proto(),
7420 buffer_id: buffer_id.to_proto(),
7421 })
7422 .await?;
7423 Ok(response.staged_text)
7424 }
7425 }
7426 });
7427 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
7428 }
7429
    /// Loads the committed (HEAD) and staged (index) texts for `repo_path`
    /// and packages them as a `DiffBasesChange` for the buffer's diffs.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    // When index and HEAD agree, both diff bases can share
                    // one text; otherwise each base is set independently.
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    // The remote side performs the same equality check and
                    // reports which shape the bases take via `mode`.
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
7475
7476 pub fn load_commit_template_text(
7477 &mut self,
7478 ) -> oneshot::Receiver<Result<Option<GitCommitTemplate>>> {
7479 self.send_job(None, move |git_repo, _cx| async move {
7480 match git_repo {
7481 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7482 backend.load_commit_template().await
7483 }
7484 RepositoryState::Remote(_) => Ok(None),
7485 }
7486 })
7487 }
7488
7489 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
7490 let repository_id = self.snapshot.id;
7491 let rx = self.send_job(None, move |state, _| async move {
7492 match state {
7493 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
7494 backend.load_blob_content(oid).await
7495 }
7496 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
7497 let response = client
7498 .request(proto::GetBlobContent {
7499 project_id: project_id.to_proto(),
7500 repository_id: repository_id.0,
7501 oid: oid.to_string(),
7502 })
7503 .await?;
7504 Ok(response.content)
7505 }
7506 }
7507 });
7508 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
7509 }
7510
    /// Reacts to filesystem changes under `paths` by scheduling a keyed job
    /// that refreshes git statuses, diff stats, and stash entries for all
    /// paths accumulated so far, then applies any differences to the
    /// snapshot and (optionally) forwards the updated snapshot downstream
    /// via `updates_tx`.
    ///
    /// Only local repositories can refresh; the job bails for remote ones.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        // Keyed by `RefreshStatuses`: when the job eventually runs it drains
        // everything accumulated in `paths_needing_status_update`, so repeated
        // notifications share the work.
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                // An earlier run of this keyed job may already have drained
                // the pending paths; nothing left to do then.
                if changed_paths.is_empty() {
                    return Ok(());
                }

                let has_head = prev_snapshot.head_commit.is_some();

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let changed_paths_vec = changed_paths.iter().cloned().collect::<Vec<_>>();

                        // Diff stats need a HEAD commit to diff against; in a
                        // repo without one, substitute empty stats.
                        let status_task = backend.status(&changed_paths_vec);
                        let diff_stat_future = if has_head {
                            backend.diff_stat(&changed_paths_vec)
                        } else {
                            future::ready(Ok(status::GitDiffStat {
                                entries: Arc::default(),
                            }))
                            .boxed()
                        };

                        let (statuses, diff_stats) =
                            futures::future::try_join(status_task, diff_stat_future).await?;

                        let diff_stats: HashMap<RepoPath, DiffStat> =
                            HashMap::from_iter(diff_stats.entries.into_iter().cloned());

                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Paths present in the fresh status output: record an
                        // insertion unless the previous snapshot already holds
                        // an identical entry. The forward-only cursor works
                        // because `statuses.entries` is iterated in path order
                        // — presumably guaranteed by the backend; verify if
                        // changing backends.
                        for (repo_path, status) in &*statuses.entries {
                            let current_diff_stat = diff_stats.get(repo_path).copied();

                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| {
                                    entry.status == *status && entry.diff_stat == current_diff_stat
                                })
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                                diff_stat: current_diff_stat,
                            }));
                        }
                        // Changed paths that no longer appear in the status
                        // output: drop their stale entries from the snapshot.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                // Apply results on the foreground thread, emitting events only
                // when something actually changed.
                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
7623
7624 /// currently running git command and when it started
7625 pub fn current_job(&self) -> Option<JobInfo> {
7626 self.active_jobs.values().next().cloned()
7627 }
7628
7629 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
7630 self.send_job(None, |_, _| async {})
7631 }
7632
    /// Runs `f` while tracking it as a pending git operation (`git_status`)
    /// on each of `paths`.
    ///
    /// When `f` completes, every op created here is marked `Finished` on
    /// success, `Skipped` if the job's channel was canceled, or `Error` on
    /// failure; only genuine errors are propagated to the caller.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A `Canceled` oneshot error is treated as a skip, not a failure.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                // Re-read each path's op list (it may have changed while `f`
                // ran) and update only the op created above, by id.
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
7672
7673 fn new_pending_ops_for_paths(
7674 &mut self,
7675 paths: Vec<RepoPath>,
7676 git_status: pending_op::GitStatus,
7677 ) -> Vec<(PendingOpId, RepoPath)> {
7678 let mut edits = Vec::with_capacity(paths.len());
7679 let mut ids = Vec::with_capacity(paths.len());
7680 for path in paths {
7681 let mut ops = self
7682 .pending_ops
7683 .get(&PathKey(path.as_ref().clone()), ())
7684 .cloned()
7685 .unwrap_or_else(|| PendingOps::new(&path));
7686 let id = ops.max_id() + 1;
7687 ops.ops.push(PendingOp {
7688 id,
7689 git_status,
7690 job_status: pending_op::JobStatus::Running,
7691 });
7692 edits.push(sum_tree::Edit::Insert(ops));
7693 ids.push((id, path));
7694 }
7695 self.pending_ops.edit(edits, ());
7696 ids
7697 }
7698 pub fn default_remote_url(&self) -> Option<String> {
7699 self.remote_upstream_url
7700 .clone()
7701 .or(self.remote_origin_url.clone())
7702 }
7703}
7704
7705/// If `path` is a git linked worktree checkout, resolves it to the main
7706/// repository's working directory path. Returns `None` if `path` is a normal
7707/// repository, not a git repo, or if resolution fails.
7708///
7709/// Resolution works by:
7710/// 1. Reading the `.git` file to get the `gitdir:` pointer
7711/// 2. Following that to the worktree-specific git directory
7712/// 3. Reading the `commondir` file to find the shared `.git` directory
7713/// 4. Deriving the main repo's working directory from the common dir
7714pub async fn resolve_git_worktree_to_main_repo(fs: &dyn Fs, path: &Path) -> Option<PathBuf> {
7715 let dot_git = path.join(".git");
7716 let metadata = fs.metadata(&dot_git).await.ok()??;
7717 if metadata.is_dir {
7718 return None; // Normal repo, not a linked worktree
7719 }
7720 // It's a .git file — parse the gitdir: pointer
7721 let content = fs.load(&dot_git).await.ok()?;
7722 let gitdir_rel = content.strip_prefix("gitdir:")?.trim();
7723 let gitdir_abs = fs.canonicalize(&path.join(gitdir_rel)).await.ok()?;
7724 // Read commondir to find the main .git directory
7725 let commondir_content = fs.load(&gitdir_abs.join("commondir")).await.ok()?;
7726 let common_dir = fs
7727 .canonicalize(&gitdir_abs.join(commondir_content.trim()))
7728 .await
7729 .ok()?;
7730 git::repository::original_repo_path_from_common_dir(&common_dir)
7731}
7732
/// Validates that the resolved worktree directory is acceptable:
/// - The setting must not be an absolute path.
/// - The resolved path must be either a subdirectory of the working
///   directory or a subdirectory of its parent (i.e., a sibling).
///
/// Returns `Ok(resolved_path)` or an error with a user-facing message.
pub fn worktrees_directory_for_repo(
    original_repo_abs_path: &Path,
    worktree_directory_setting: &str,
) -> Result<PathBuf> {
    // Check the original setting before trimming, since a path like "///"
    // is absolute but becomes "" after stripping trailing separators.
    // Also check for leading `/` or `\` explicitly, because on Windows
    // `Path::is_absolute()` requires a drive letter — so `/tmp/worktrees`
    // would slip through even though it's clearly not a relative path.
    if Path::new(worktree_directory_setting).is_absolute()
        || worktree_directory_setting.starts_with('/')
        || worktree_directory_setting.starts_with('\\')
    {
        anyhow::bail!(
            "git.worktree_directory must be a relative path, got: {worktree_directory_setting:?}"
        );
    }

    if worktree_directory_setting.is_empty() {
        anyhow::bail!("git.worktree_directory must not be empty");
    }

    // A bare ".." would resolve to the repo's parent directory itself, so a
    // named subdirectory is required instead.
    let trimmed = worktree_directory_setting.trim_end_matches(['/', '\\']);
    if trimmed == ".." {
        anyhow::bail!("git.worktree_directory must not be \"..\" (use \"../some-name\" instead)");
    }

    let joined = original_repo_abs_path.join(trimmed);
    let resolved = util::normalize_path(&joined);
    // If the setting escapes the repo (e.g. "../worktrees"), nest the result
    // under a directory named after the repo — presumably to keep worktrees
    // of different repositories apart when they share a parent.
    let resolved = if resolved.starts_with(original_repo_abs_path) {
        resolved
    } else if let Some(repo_dir_name) = original_repo_abs_path.file_name() {
        resolved.join(repo_dir_name)
    } else {
        resolved
    };

    // Final containment check: anything outside the repo's parent is refused.
    let parent = original_repo_abs_path
        .parent()
        .unwrap_or(original_repo_abs_path);

    if !resolved.starts_with(parent) {
        anyhow::bail!(
            "git.worktree_directory resolved to {resolved:?}, which is outside \
            the project root and its parent directory. It must resolve to a \
            subdirectory of {original_repo_abs_path:?} or a sibling of it."
        );
    }

    Ok(resolved)
}
7790
7791async fn remove_empty_managed_worktree_ancestors(fs: &dyn Fs, child_path: &Path, base_path: &Path) {
7792 let mut current = child_path;
7793 while let Some(parent) = current.parent() {
7794 if parent == base_path {
7795 break;
7796 }
7797 if !parent.starts_with(base_path) {
7798 break;
7799 }
7800
7801 let result = fs
7802 .remove_dir(
7803 parent,
7804 RemoveOptions {
7805 recursive: false,
7806 ignore_if_not_exists: true,
7807 },
7808 )
7809 .await;
7810
7811 match result {
7812 Ok(()) => {
7813 log::info!(
7814 "Removed empty managed worktree directory: {}",
7815 parent.display()
7816 );
7817 }
7818 Err(error) => {
7819 log::debug!(
7820 "Stopped removing managed worktree parent directories at {}: {error}",
7821 parent.display()
7822 );
7823 break;
7824 }
7825 }
7826
7827 current = parent;
7828 }
7829}
7830
7831/// Returns a short name for a linked worktree suitable for UI display
7832///
7833/// Uses the main worktree path to come up with a short name that disambiguates
7834/// the linked worktree from the main worktree.
7835pub fn linked_worktree_short_name(
7836 main_worktree_path: &Path,
7837 linked_worktree_path: &Path,
7838) -> Option<SharedString> {
7839 if main_worktree_path == linked_worktree_path {
7840 return None;
7841 }
7842
7843 let project_name = main_worktree_path.file_name()?.to_str()?;
7844 let directory_name = linked_worktree_path.file_name()?.to_str()?;
7845 let name = if directory_name != project_name {
7846 directory_name.to_string()
7847 } else {
7848 linked_worktree_path
7849 .parent()?
7850 .file_name()?
7851 .to_str()?
7852 .to_string()
7853 };
7854 Some(name.into())
7855}
7856
/// Builds a permalink URL for `path` when it lives inside a crates.io
/// registry source checkout, by combining the published commit SHA recorded
/// in the crate's `.cargo_vcs_info.json` with the `package.repository` URL
/// from its `Cargo.toml`.
///
/// NOTE(review): uses blocking `std::fs` reads — assumes callers invoke this
/// off the UI thread; confirm at call sites.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Shape of the `.cargo_vcs_info.json` cargo embeds in published crates:
    // the source commit SHA plus the crate's path within the repo.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    // Minimal view of `Cargo.toml`: only `package.repository` is needed.
    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // Walk up from the file (`skip(1)` starts at its parent directory) to
    // find the crate root containing `.cargo_vcs_info.json`.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // Re-root the file path at the crate's location inside the source repo.
    // `unwrap` is safe: `dir` was found among `path.ancestors()`.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}
7907
7908fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
7909 let Some(blame) = blame else {
7910 return proto::BlameBufferResponse {
7911 blame_response: None,
7912 };
7913 };
7914
7915 let entries = blame
7916 .entries
7917 .into_iter()
7918 .map(|entry| proto::BlameEntry {
7919 sha: entry.sha.as_bytes().into(),
7920 start_line: entry.range.start,
7921 end_line: entry.range.end,
7922 original_line_number: entry.original_line_number,
7923 author: entry.author,
7924 author_mail: entry.author_mail,
7925 author_time: entry.author_time,
7926 author_tz: entry.author_tz,
7927 committer: entry.committer_name,
7928 committer_mail: entry.committer_email,
7929 committer_time: entry.committer_time,
7930 committer_tz: entry.committer_tz,
7931 summary: entry.summary,
7932 previous: entry.previous,
7933 filename: entry.filename,
7934 })
7935 .collect::<Vec<_>>();
7936
7937 let messages = blame
7938 .messages
7939 .into_iter()
7940 .map(|(oid, message)| proto::CommitMessage {
7941 oid: oid.as_bytes().into(),
7942 message,
7943 })
7944 .collect::<Vec<_>>();
7945
7946 proto::BlameBufferResponse {
7947 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
7948 }
7949}
7950
7951fn deserialize_blame_buffer_response(
7952 response: proto::BlameBufferResponse,
7953) -> Option<git::blame::Blame> {
7954 let response = response.blame_response?;
7955 let entries = response
7956 .entries
7957 .into_iter()
7958 .filter_map(|entry| {
7959 Some(git::blame::BlameEntry {
7960 sha: git::Oid::from_bytes(&entry.sha).ok()?,
7961 range: entry.start_line..entry.end_line,
7962 original_line_number: entry.original_line_number,
7963 committer_name: entry.committer,
7964 committer_time: entry.committer_time,
7965 committer_tz: entry.committer_tz,
7966 committer_email: entry.committer_mail,
7967 author: entry.author,
7968 author_mail: entry.author_mail,
7969 author_time: entry.author_time,
7970 author_tz: entry.author_tz,
7971 summary: entry.summary,
7972 previous: entry.previous,
7973 filename: entry.filename,
7974 })
7975 })
7976 .collect::<Vec<_>>();
7977
7978 let messages = response
7979 .messages
7980 .into_iter()
7981 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
7982 .collect::<HashMap<_, _>>();
7983
7984 Some(Blame { entries, messages })
7985}
7986
7987fn commit_data_to_proto(commit: &CommitData) -> proto::CommitData {
7988 proto::CommitData {
7989 sha: commit.sha.to_string(),
7990 parents: commit.parents.iter().map(|p| p.to_string()).collect(),
7991 author_name: commit.author_name.to_string(),
7992 author_email: commit.author_email.to_string(),
7993 commit_timestamp: commit.commit_timestamp,
7994 subject: commit.subject.to_string(),
7995 message: commit.message.to_string(),
7996 }
7997}
7998
7999fn commit_data_from_proto(commit: proto::CommitData) -> Result<CommitData> {
8000 let sha = Oid::from_str(&commit.sha)?;
8001 let mut parents = SmallVec::with_capacity(commit.parents.len());
8002 for parent in &commit.parents {
8003 parents.push(Oid::from_str(parent)?);
8004 }
8005 Ok(CommitData {
8006 sha,
8007 parents,
8008 author_name: SharedString::from(commit.author_name),
8009 author_email: SharedString::from(commit.author_email),
8010 commit_timestamp: commit.commit_timestamp,
8011 subject: SharedString::from(commit.subject),
8012 message: SharedString::from(commit.message),
8013 })
8014}
8015
8016fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
8017 proto::Branch {
8018 is_head: branch.is_head,
8019 ref_name: branch.ref_name.to_string(),
8020 unix_timestamp: branch
8021 .most_recent_commit
8022 .as_ref()
8023 .map(|commit| commit.commit_timestamp as u64),
8024 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
8025 ref_name: upstream.ref_name.to_string(),
8026 tracking: upstream
8027 .tracking
8028 .status()
8029 .map(|upstream| proto::UpstreamTracking {
8030 ahead: upstream.ahead as u64,
8031 behind: upstream.behind as u64,
8032 }),
8033 }),
8034 most_recent_commit: branch
8035 .most_recent_commit
8036 .as_ref()
8037 .map(|commit| proto::CommitSummary {
8038 sha: commit.sha.to_string(),
8039 subject: commit.subject.to_string(),
8040 commit_timestamp: commit.commit_timestamp,
8041 author_name: commit.author_name.to_string(),
8042 }),
8043 }
8044}
8045
8046fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
8047 proto::Worktree {
8048 path: worktree.path.to_string_lossy().to_string(),
8049 ref_name: worktree
8050 .ref_name
8051 .as_ref()
8052 .map(|s| s.to_string())
8053 .unwrap_or_default(),
8054 sha: worktree.sha.to_string(),
8055 is_main: worktree.is_main,
8056 is_bare: worktree.is_bare,
8057 }
8058}
8059
8060fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
8061 git::repository::Worktree {
8062 path: PathBuf::from(proto.path.clone()),
8063 ref_name: if proto.ref_name.is_empty() {
8064 None
8065 } else {
8066 Some(SharedString::from(&proto.ref_name))
8067 },
8068 sha: proto.sha.clone().into(),
8069 is_main: proto.is_main,
8070 is_bare: proto.is_bare,
8071 }
8072}
8073
8074fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
8075 git::repository::Branch {
8076 is_head: proto.is_head,
8077 ref_name: proto.ref_name.clone().into(),
8078 upstream: proto
8079 .upstream
8080 .as_ref()
8081 .map(|upstream| git::repository::Upstream {
8082 ref_name: upstream.ref_name.to_string().into(),
8083 tracking: upstream
8084 .tracking
8085 .as_ref()
8086 .map(|tracking| {
8087 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
8088 ahead: tracking.ahead as u32,
8089 behind: tracking.behind as u32,
8090 })
8091 })
8092 .unwrap_or(git::repository::UpstreamTracking::Gone),
8093 }),
8094 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
8095 git::repository::CommitSummary {
8096 sha: commit.sha.to_string().into(),
8097 subject: commit.subject.to_string().into(),
8098 commit_timestamp: commit.commit_timestamp,
8099 author_name: commit.author_name.to_string().into(),
8100 has_parent: true,
8101 }
8102 }),
8103 }
8104}
8105
8106fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
8107 proto::GitCommitDetails {
8108 sha: commit.sha.to_string(),
8109 message: commit.message.to_string(),
8110 commit_timestamp: commit.commit_timestamp,
8111 author_email: commit.author_email.to_string(),
8112 author_name: commit.author_name.to_string(),
8113 }
8114}
8115
8116fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
8117 CommitDetails {
8118 sha: proto.sha.clone().into(),
8119 message: proto.message.clone().into(),
8120 commit_timestamp: proto.commit_timestamp,
8121 author_email: proto.author_email.clone().into(),
8122 author_name: proto.author_name.clone().into(),
8123 }
8124}
8125
#[cfg(any(test, feature = "test-support"))]
impl Repository {
    /// Test-only accessor: returns a copy of every fully loaded commit's
    /// data, keyed by SHA, skipping entries that are still loading.
    pub fn loaded_commit_data_for_test(&self) -> HashMap<Oid, CommitData> {
        let mut loaded = HashMap::default();
        for (sha, state) in self.commit_data.iter() {
            if let CommitDataState::Loaded(data) = state {
                loaded.insert(*sha, data.as_ref().clone());
            }
        }
        loaded
    }
}
8138
// Property tests for the commit-data cache: random interleavings of
// `fetch_commit_data` calls are checked against the structural invariants
// relating `Repository::commit_data` to the commit-data handler's
// `pending_requests` and `completion_senders`.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::Project;
    use fs::FakeFs;
    use gpui::TestAppContext;
    use gpui::proptest::prelude::*;
    use rand::{SeedableRng, rngs::StdRng};
    use serde_json::json;
    use settings::SettingsStore;
    use std::path::Path;

    // Installs a test `SettingsStore` global, which project setup requires.
    fn init_test(cx: &mut TestAppContext) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
        });
    }

    // Runs every invariant check appropriate for the handler's state.
    fn verify_invariants(repository: &Repository) -> anyhow::Result<()> {
        match &repository.commit_data_handler {
            CommitDataHandlerState::Open(handler) => {
                verify_loading_entries_are_pending(repository, handler)?;
                verify_await_result_loading_entries_have_completion_senders(repository, handler)?;
                verify_pending_requests_are_loading(repository, handler)?;
                verify_completion_senders_are_await_result_loading(repository, handler)?;
                verify_completion_senders_are_pending(handler)?;
                verify_non_await_result_loading_entries_have_no_completion_sender(
                    repository, handler,
                )?;
                verify_loaded_entries_are_not_pending(repository, handler)?;
                verify_loaded_entries_have_no_completion_sender(repository, handler)?;
            }
            CommitDataHandlerState::Closed => {
                verify_closed_handler_invariants(repository)?;
            }
        }

        Ok(())
    }

    // Every `Loading` cache entry must be tracked as a pending request.
    fn verify_loading_entries_are_pending(
        repository: &Repository,
        handler: &CommitDataHandler,
    ) -> anyhow::Result<()> {
        for (sha, state) in &repository.commit_data {
            if matches!(state, CommitDataState::Loading(_)) {
                anyhow::ensure!(
                    handler.pending_requests.contains(sha),
                    "loading commit data for {sha} must be tracked in pending_requests"
                );
            }
        }

        Ok(())
    }

    // Every `Loading(Some(_))` entry (a caller awaits the result) must have
    // a completion sender registered.
    fn verify_await_result_loading_entries_have_completion_senders(
        repository: &Repository,
        handler: &CommitDataHandler,
    ) -> anyhow::Result<()> {
        for (sha, state) in &repository.commit_data {
            if matches!(state, CommitDataState::Loading(Some(_))) {
                anyhow::ensure!(
                    handler.completion_senders.contains_key(sha),
                    "await-result loading commit data for {sha} must have a completion sender"
                );
            }
        }

        Ok(())
    }

    // The converse of the above: each pending request maps to a `Loading`
    // cache entry.
    fn verify_pending_requests_are_loading(
        repository: &Repository,
        handler: &CommitDataHandler,
    ) -> anyhow::Result<()> {
        for sha in &handler.pending_requests {
            anyhow::ensure!(
                matches!(
                    repository.commit_data.get(sha),
                    Some(CommitDataState::Loading(_))
                ),
                "pending request for {sha} must correspond to loading commit data"
            );
        }

        Ok(())
    }

    // Each completion sender must correspond to an awaited `Loading` entry.
    fn verify_completion_senders_are_await_result_loading(
        repository: &Repository,
        handler: &CommitDataHandler,
    ) -> anyhow::Result<()> {
        for sha in handler.completion_senders.keys() {
            anyhow::ensure!(
                matches!(
                    repository.commit_data.get(sha),
                    Some(CommitDataState::Loading(Some(_)))
                ),
                "completion sender for {sha} must correspond to await-result loading commit data"
            );
        }

        Ok(())
    }

    // Each completion sender must also appear in `pending_requests`.
    fn verify_completion_senders_are_pending(handler: &CommitDataHandler) -> anyhow::Result<()> {
        for sha in handler.completion_senders.keys() {
            anyhow::ensure!(
                handler.pending_requests.contains(sha),
                "completion sender for {sha} must also be tracked as pending"
            );
        }

        Ok(())
    }

    // `Loading(None)` entries (fire-and-forget fetches) must not have a
    // completion sender.
    fn verify_non_await_result_loading_entries_have_no_completion_sender(
        repository: &Repository,
        handler: &CommitDataHandler,
    ) -> anyhow::Result<()> {
        for (sha, state) in &repository.commit_data {
            if matches!(state, CommitDataState::Loading(None)) {
                anyhow::ensure!(
                    !handler.completion_senders.contains_key(sha),
                    "non-await-result loading commit data for {sha} must not have a completion sender"
                );
            }
        }

        Ok(())
    }

    // `Loaded` entries must no longer be tracked as pending.
    fn verify_loaded_entries_are_not_pending(
        repository: &Repository,
        handler: &CommitDataHandler,
    ) -> anyhow::Result<()> {
        for (sha, state) in &repository.commit_data {
            if matches!(state, CommitDataState::Loaded(_)) {
                anyhow::ensure!(
                    !handler.pending_requests.contains(sha),
                    "loaded commit data for {sha} must not still be pending"
                );
            }
        }

        Ok(())
    }

    // `Loaded` entries must no longer hold a completion sender.
    fn verify_loaded_entries_have_no_completion_sender(
        repository: &Repository,
        handler: &CommitDataHandler,
    ) -> anyhow::Result<()> {
        for (sha, state) in &repository.commit_data {
            if matches!(state, CommitDataState::Loaded(_)) {
                anyhow::ensure!(
                    !handler.completion_senders.contains_key(sha),
                    "loaded commit data for {sha} must not keep a completion sender"
                );
            }
        }

        Ok(())
    }

    // A closed handler implies no cache entry can still be `Loading`.
    fn verify_closed_handler_invariants(repository: &Repository) -> anyhow::Result<()> {
        for (sha, state) in &repository.commit_data {
            anyhow::ensure!(
                !matches!(state, CommitDataState::Loading(_)),
                "closed handler must not keep loading commit data for {sha}"
            );
        }

        Ok(())
    }

    // Randomized property test: issue `fetch_commit_data` in random-size
    // chunks, with some commits configured to fail or be missing, verifying
    // the invariants after every synchronous step and after every drain, and
    // finally checking that exactly the expected SHAs ended up loaded.
    #[gpui::property_test(config = ProptestConfig {
        cases: 20,
        ..Default::default()
    })]
    async fn test_commit_data_random_invariants(
        #[strategy = any::<u64>()] seed: u64,
        #[strategy = gpui::proptest::collection::vec(0usize..2000, 1..200)] commit_indexes: Vec<
            usize,
        >,
        #[strategy = gpui::proptest::collection::vec(any::<bool>(), 1..200)] await_results: Vec<
            bool,
        >,
        #[strategy = gpui::proptest::collection::vec(0usize..2000, 0..200)] failing_commit_indexes: Vec<
            usize,
        >,
        #[strategy = gpui::proptest::collection::vec(0usize..2000, 0..200)] missing_commit_indexes: Vec<
            usize,
        >,
        cx: &mut TestAppContext,
    ) {
        init_test(cx);
        let mut rng = StdRng::seed_from_u64(seed);

        // Build the universe of commits, marking some as failing-to-load and
        // excluding "missing" ones from the fake repo entirely.
        let commit_shas = (0..2000).map(|_| Oid::random(&mut rng)).collect::<Vec<_>>();
        let failing_shas = failing_commit_indexes
            .into_iter()
            .map(|index| commit_shas[index % commit_shas.len()])
            .collect::<HashSet<_>>();
        let missing_shas = missing_commit_indexes
            .into_iter()
            .map(|index| commit_shas[index % commit_shas.len()])
            .collect::<HashSet<_>>();
        let commit_data = commit_shas
            .iter()
            .filter(|sha| !missing_shas.contains(sha))
            .map(|sha| {
                (
                    CommitData {
                        sha: *sha,
                        parents: SmallVec::new(),
                        author_name: SharedString::from(format!("Author {sha}")),
                        author_email: SharedString::from(format!("{sha}@example.com")),
                        commit_timestamp: rng.random_range(0..10_000),
                        subject: SharedString::from(format!("Subject {sha}")),
                        message: SharedString::from(format!("Subject {sha}\n\nBody for {sha}")),
                    },
                    failing_shas.contains(sha),
                )
            })
            .collect::<Vec<_>>();
        // Only fetched commits that neither fail nor are missing should end
        // up loaded.
        let expected_loaded_shas = commit_indexes
            .iter()
            .map(|index| commit_shas[index % commit_shas.len()])
            .filter(|sha| !failing_shas.contains(sha) && !missing_shas.contains(sha))
            .collect::<HashSet<_>>();

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            Path::new("/project"),
            json!({
                ".git": {},
                "file.txt": "content",
            }),
        )
        .await;
        fs.set_commit_data(Path::new("/project/.git"), commit_data);

        let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
        project
            .update(cx, |project, cx| project.git_scans_complete(cx))
            .await;

        let repository = project.read_with(cx, |project, cx| {
            project
                .active_repository(cx)
                .expect("should have a repository")
        });

        // Also re-check the invariants on every repository notification.
        cx.update(|cx| {
            cx.observe(&repository, |repo, cx| {
                verify_invariants(repo.read(cx))
                    .context("Invariant weren't held after a cx.notify")
                    .unwrap();
            })
        })
        .detach();

        // Issue fetches in random-size chunks; check invariants after each
        // synchronous call and again after draining async work.
        let mut next_step = 0;
        while next_step < commit_indexes.len() {
            let remaining_steps = commit_indexes.len() - next_step;
            let chunk_size = rng.random_range(1..=remaining_steps.min(16));
            let chunk_end = next_step + chunk_size;

            for step in next_step..chunk_end {
                let sha = commit_shas[commit_indexes[step] % commit_shas.len()];
                let await_result = await_results[step % await_results.len()];

                repository.update(cx, |repository, cx| {
                    repository.fetch_commit_data(sha, await_result, cx);
                    verify_invariants(repository)
                        .with_context(|| {
                            format!(
                                "commit data invariant violation after step {} for sha {}",
                                step + 1,
                                sha,
                            )
                        })
                        .unwrap();
                });
            }

            cx.run_until_parked();
            repository.read_with(cx, |repository, _cx| {
                verify_invariants(repository)
                    .with_context(|| {
                        format!(
                            "commit data invariant violation after draining through step {}",
                            chunk_end,
                        )
                    })
                    .unwrap();
            });

            next_step = chunk_end;
        }

        // Final drain, then compare the loaded SHA set against expectation.
        cx.run_until_parked();
        repository.read_with(cx, |repository, _cx| {
            verify_invariants(repository)
                .with_context(|| "commit data invariant violation after final drain".to_string())
                .unwrap();

            let loaded_shas = repository
                .commit_data
                .iter()
                .filter_map(|(sha, state)| match state {
                    CommitDataState::Loaded(_) => Some(*sha),
                    CommitDataState::Loading(_) => None,
                })
                .collect::<HashSet<_>>();
            let missing_loaded_shas = expected_loaded_shas
                .difference(&loaded_shas)
                .copied()
                .collect::<Vec<_>>();
            let unexpected_loaded_shas = loaded_shas
                .difference(&expected_loaded_shas)
                .copied()
                .collect::<Vec<_>>();
            assert!(
                missing_loaded_shas.is_empty() && unexpected_loaded_shas.is_empty(),
                "loaded commit data SHAs after final drain did not match expectation. missing: {:?}, unexpected: {:?}",
                missing_loaded_shas,
                unexpected_loaded_shas,
            );
        });
    }
}
8473
8474/// This snapshot computes the repository state on the foreground thread while
8475/// running the git commands on the background thread. We update branch, head,
8476/// remotes, and worktrees first so the UI can react sooner, then compute file
8477/// state and emit those events immediately after.
8478async fn compute_snapshot(
8479 this: Entity<Repository>,
8480 backend: Arc<dyn GitRepository>,
8481 cx: &mut AsyncApp,
8482) -> Result<RepositorySnapshot> {
8483 let (id, work_directory_abs_path, prev_snapshot) = this.update(cx, |this, _| {
8484 this.paths_needing_status_update.clear();
8485 (
8486 this.id,
8487 this.work_directory_abs_path.clone(),
8488 this.snapshot.clone(),
8489 )
8490 });
8491
8492 let head_commit_future = {
8493 let backend = backend.clone();
8494 async move {
8495 Ok(match backend.head_sha().await {
8496 Some(head_sha) => backend.show(head_sha).await.log_err(),
8497 None => None,
8498 })
8499 }
8500 };
8501 let (branches, head_commit, all_worktrees) = cx
8502 .background_spawn({
8503 let backend = backend.clone();
8504 async move {
8505 futures::future::try_join3(
8506 backend.branches(),
8507 head_commit_future,
8508 backend.worktrees(),
8509 )
8510 .await
8511 }
8512 })
8513 .await?;
8514 let branch = branches.iter().find(|branch| branch.is_head).cloned();
8515 let branch_list: Arc<[Branch]> = branches.into();
8516
8517 let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
8518 .into_iter()
8519 .filter(|wt| wt.path != *work_directory_abs_path)
8520 .collect();
8521
8522 let (remote_origin_url, remote_upstream_url) = cx
8523 .background_spawn({
8524 let backend = backend.clone();
8525 async move {
8526 Ok::<_, anyhow::Error>(
8527 futures::future::join(
8528 backend.remote_url("origin"),
8529 backend.remote_url("upstream"),
8530 )
8531 .await,
8532 )
8533 }
8534 })
8535 .await?;
8536
8537 let snapshot = this.update(cx, |this, cx| {
8538 let head_changed =
8539 branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
8540 let branch_list_changed = *branch_list != *this.snapshot.branch_list;
8541 let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;
8542
8543 this.snapshot = RepositorySnapshot {
8544 id,
8545 work_directory_abs_path,
8546 branch,
8547 branch_list: branch_list.clone(),
8548 head_commit,
8549 remote_origin_url,
8550 remote_upstream_url,
8551 linked_worktrees,
8552 scan_id: prev_snapshot.scan_id + 1,
8553 ..prev_snapshot
8554 };
8555
8556 if head_changed {
8557 cx.emit(RepositoryEvent::HeadChanged);
8558 }
8559
8560 if branch_list_changed {
8561 cx.emit(RepositoryEvent::BranchListChanged);
8562 }
8563
8564 if worktrees_changed {
8565 cx.emit(RepositoryEvent::GitWorktreeListChanged);
8566 }
8567
8568 this.snapshot.clone()
8569 });
8570
8571 let (statuses, diff_stats, stash_entries) = cx
8572 .background_spawn({
8573 let backend = backend.clone();
8574 let snapshot = snapshot.clone();
8575 async move {
8576 let diff_stat_future: BoxFuture<'_, Result<status::GitDiffStat>> =
8577 if snapshot.head_commit.is_some() {
8578 backend.diff_stat(&[])
8579 } else {
8580 future::ready(Ok(status::GitDiffStat {
8581 entries: Arc::default(),
8582 }))
8583 .boxed()
8584 };
8585 futures::future::try_join3(
8586 backend.status(&[RepoPath::from_rel_path(
8587 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
8588 )]),
8589 diff_stat_future,
8590 backend.stash_entries(),
8591 )
8592 .await
8593 }
8594 })
8595 .await?;
8596
8597 let diff_stat_map: HashMap<&RepoPath, DiffStat> =
8598 diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect();
8599 let mut conflicted_paths = Vec::new();
8600 let statuses_by_path = SumTree::from_iter(
8601 statuses.entries.iter().map(|(repo_path, status)| {
8602 if status.is_conflicted() {
8603 conflicted_paths.push(repo_path.clone());
8604 }
8605 StatusEntry {
8606 repo_path: repo_path.clone(),
8607 status: *status,
8608 diff_stat: diff_stat_map.get(repo_path).copied(),
8609 }
8610 }),
8611 (),
8612 );
8613
8614 let merge_details = cx
8615 .background_spawn({
8616 let backend = backend.clone();
8617 let mut merge_details = snapshot.merge.clone();
8618 async move {
8619 let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
8620 Ok::<_, anyhow::Error>((merge_details, conflicts_changed))
8621 }
8622 })
8623 .await?;
8624 let (merge_details, conflicts_changed) = merge_details;
8625 log::debug!("new merge details: {merge_details:?}");
8626
8627 Ok(this.update(cx, |this, cx| {
8628 if conflicts_changed || statuses_by_path != this.snapshot.statuses_by_path {
8629 cx.emit(RepositoryEvent::StatusesChanged);
8630 }
8631 if stash_entries != this.snapshot.stash_entries {
8632 cx.emit(RepositoryEvent::StashEntriesChanged);
8633 }
8634
8635 this.snapshot.scan_id += 1;
8636 this.snapshot.merge = merge_details;
8637 this.snapshot.statuses_by_path = statuses_by_path;
8638 this.snapshot.stash_entries = stash_entries;
8639
8640 this.snapshot.clone()
8641 }))
8642}
8643
8644fn status_from_proto(
8645 simple_status: i32,
8646 status: Option<proto::GitFileStatus>,
8647) -> anyhow::Result<FileStatus> {
8648 use proto::git_file_status::Variant;
8649
8650 let Some(variant) = status.and_then(|status| status.variant) else {
8651 let code = proto::GitStatus::from_i32(simple_status)
8652 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
8653 let result = match code {
8654 proto::GitStatus::Added => TrackedStatus {
8655 worktree_status: StatusCode::Added,
8656 index_status: StatusCode::Unmodified,
8657 }
8658 .into(),
8659 proto::GitStatus::Modified => TrackedStatus {
8660 worktree_status: StatusCode::Modified,
8661 index_status: StatusCode::Unmodified,
8662 }
8663 .into(),
8664 proto::GitStatus::Conflict => UnmergedStatus {
8665 first_head: UnmergedStatusCode::Updated,
8666 second_head: UnmergedStatusCode::Updated,
8667 }
8668 .into(),
8669 proto::GitStatus::Deleted => TrackedStatus {
8670 worktree_status: StatusCode::Deleted,
8671 index_status: StatusCode::Unmodified,
8672 }
8673 .into(),
8674 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
8675 };
8676 return Ok(result);
8677 };
8678
8679 let result = match variant {
8680 Variant::Untracked(_) => FileStatus::Untracked,
8681 Variant::Ignored(_) => FileStatus::Ignored,
8682 Variant::Unmerged(unmerged) => {
8683 let [first_head, second_head] =
8684 [unmerged.first_head, unmerged.second_head].map(|head| {
8685 let code = proto::GitStatus::from_i32(head)
8686 .with_context(|| format!("Invalid git status code: {head}"))?;
8687 let result = match code {
8688 proto::GitStatus::Added => UnmergedStatusCode::Added,
8689 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
8690 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
8691 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
8692 };
8693 Ok(result)
8694 });
8695 let [first_head, second_head] = [first_head?, second_head?];
8696 UnmergedStatus {
8697 first_head,
8698 second_head,
8699 }
8700 .into()
8701 }
8702 Variant::Tracked(tracked) => {
8703 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
8704 .map(|status| {
8705 let code = proto::GitStatus::from_i32(status)
8706 .with_context(|| format!("Invalid git status code: {status}"))?;
8707 let result = match code {
8708 proto::GitStatus::Modified => StatusCode::Modified,
8709 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
8710 proto::GitStatus::Added => StatusCode::Added,
8711 proto::GitStatus::Deleted => StatusCode::Deleted,
8712 proto::GitStatus::Renamed => StatusCode::Renamed,
8713 proto::GitStatus::Copied => StatusCode::Copied,
8714 proto::GitStatus::Unmodified => StatusCode::Unmodified,
8715 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
8716 };
8717 Ok(result)
8718 });
8719 let [index_status, worktree_status] = [index_status?, worktree_status?];
8720 TrackedStatus {
8721 index_status,
8722 worktree_status,
8723 }
8724 .into()
8725 }
8726 };
8727 Ok(result)
8728}
8729
8730fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
8731 use proto::git_file_status::{Tracked, Unmerged, Variant};
8732
8733 let variant = match status {
8734 FileStatus::Untracked => Variant::Untracked(Default::default()),
8735 FileStatus::Ignored => Variant::Ignored(Default::default()),
8736 FileStatus::Unmerged(UnmergedStatus {
8737 first_head,
8738 second_head,
8739 }) => Variant::Unmerged(Unmerged {
8740 first_head: unmerged_status_to_proto(first_head),
8741 second_head: unmerged_status_to_proto(second_head),
8742 }),
8743 FileStatus::Tracked(TrackedStatus {
8744 index_status,
8745 worktree_status,
8746 }) => Variant::Tracked(Tracked {
8747 index_status: tracked_status_to_proto(index_status),
8748 worktree_status: tracked_status_to_proto(worktree_status),
8749 }),
8750 };
8751 proto::GitFileStatus {
8752 variant: Some(variant),
8753 }
8754}
8755
8756fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
8757 match code {
8758 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
8759 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
8760 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
8761 }
8762}
8763
8764fn tracked_status_to_proto(code: StatusCode) -> i32 {
8765 match code {
8766 StatusCode::Added => proto::GitStatus::Added as _,
8767 StatusCode::Deleted => proto::GitStatus::Deleted as _,
8768 StatusCode::Modified => proto::GitStatus::Modified as _,
8769 StatusCode::Renamed => proto::GitStatus::Renamed as _,
8770 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
8771 StatusCode::Copied => proto::GitStatus::Copied as _,
8772 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
8773 }
8774}