1use anyhow::Result;
2use client::{Client, EditPredictionUsage, UserStore};
3use cloud_api_types::{OrganizationId, SubmitEditPredictionFeedbackBody};
4use cloud_llm_client::predict_edits_v3::{
5 PredictEditsV3Request, PredictEditsV3Response, RawCompletionRequest, RawCompletionResponse,
6};
7use cloud_llm_client::{
8 EditPredictionRejectReason, EditPredictionRejection,
9 MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST, MINIMUM_REQUIRED_VERSION_HEADER_NAME,
10 PredictEditsRequestTrigger, RejectEditPredictionsBodyRef, ZED_VERSION_HEADER_NAME,
11};
12use collections::{HashMap, HashSet};
13use copilot::{Copilot, Reinstall, SignIn, SignOut};
14use db::kvp::{Dismissable, KeyValueStore};
15use edit_prediction_context::{RelatedExcerptStore, RelatedExcerptStoreEvent, RelatedFile};
16use feature_flags::{FeatureFlag, FeatureFlagAppExt as _};
17use futures::{
18 AsyncReadExt as _, FutureExt as _, StreamExt as _,
19 channel::mpsc::{self, UnboundedReceiver},
20 select_biased,
21};
22use gpui::BackgroundExecutor;
23use gpui::http_client::Url;
24use gpui::{
25 App, AsyncApp, Entity, EntityId, Global, SharedString, Task, WeakEntity, actions,
26 http_client::{self, AsyncBody, Method},
27 prelude::*,
28};
29use heapless::Vec as ArrayVec;
30use language::language_settings::all_language_settings;
31use language::{Anchor, Buffer, File, Point, TextBufferSnapshot, ToOffset, ToPoint};
32use language::{BufferSnapshot, OffsetRangeExt};
33use language_model::{LlmApiToken, NeedsLlmTokenRefresh};
34use project::{DisableAiSettings, Project, ProjectPath, WorktreeId};
35use release_channel::AppVersion;
36use semver::Version;
37use serde::de::DeserializeOwned;
38use settings::{
39 EditPredictionPromptFormat, EditPredictionProvider, Settings as _, update_settings_file,
40};
41use std::collections::{VecDeque, hash_map};
42use std::env;
43use text::{AnchorRangeExt, Edit};
44use workspace::{AppState, Workspace};
45use zeta_prompt::{ZetaFormat, ZetaPromptInput};
46
47use std::mem;
48use std::ops::Range;
49use std::path::Path;
50use std::rc::Rc;
51use std::str::FromStr as _;
52use std::sync::Arc;
53use std::time::{Duration, Instant};
54
55use thiserror::Error;
56use util::{RangeExt as _, ResultExt as _};
57
// Submodules: prediction providers, prompt helpers, and supporting machinery.
pub mod cursor_excerpt;
pub mod example_spec;
pub mod fim;
mod license_detection;
pub mod mercury;
pub mod ollama;
mod onboarding_modal;
pub mod open_ai_response;
mod prediction;

pub mod udiff;

mod capture_example;
pub mod open_ai_compatible;
mod zed_edit_prediction_delegate;
pub mod zeta;

#[cfg(test)]
mod edit_prediction_tests;

// Crate-internal imports and the public re-export surface of this crate.
use crate::cursor_excerpt::expand_context_syntactically_then_linewise;
use crate::example_spec::ExampleSpec;
use crate::license_detection::LicenseDetectionWatcher;
use crate::mercury::Mercury;
use crate::onboarding_modal::ZedPredictModal;
pub use crate::prediction::EditPrediction;
pub use crate::prediction::EditPredictionId;
use crate::prediction::EditPredictionResult;
pub use capture_example::capture_example;
pub use language_model::ApiKeyState;
pub use telemetry_events::EditPredictionRating;
pub use zed_edit_prediction_delegate::ZedEditPredictionDelegate;
90
// Workspace actions registered under the `edit_prediction` namespace.
actions!(
    edit_prediction,
    [
        /// Resets the edit prediction onboarding state.
        ResetOnboarding,
        /// Clears the edit prediction history.
        ClearHistory,
    ]
);
100
/// Maximum number of events to track.
const EVENT_COUNT_MAX: usize = 10;
// Line-distance threshold used when grouping nearby edits (see `StoredEvent::can_merge`).
const CHANGE_GROUPING_LINE_SPAN: u32 = 8;
const EDIT_HISTORY_DIFF_SIZE_LIMIT: usize = 2048 * 3; // ~2048 tokens or ~50% of typical prompt budget
// NOTE(review): the constants below are consumed outside this view; names suggest a
// token budget, edit-coalescing window, persistence key, reject-report debounce, and
// the settled-prediction telemetry event name / TTL / quiescence window — confirm at use sites.
const COLLABORATOR_EDIT_LOCALITY_CONTEXT_TOKENS: usize = 512;
const LAST_CHANGE_GROUPING_TIME: Duration = Duration::from_secs(1);
const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_choice";
const REJECT_REQUEST_DEBOUNCE: Duration = Duration::from_secs(15);
const EDIT_PREDICTION_SETTLED_EVENT: &str = "Edit Prediction Settled";
const EDIT_PREDICTION_SETTLED_TTL: Duration = Duration::from_secs(60 * 5);
const EDIT_PREDICTION_SETTLED_QUIESCENCE: Duration = Duration::from_secs(10);
112
/// Feature flag gating edit-prediction "jumps" (server-side flag name:
/// `edit_prediction_jumps`).
pub struct EditPredictionJumpsFeatureFlag;

impl FeatureFlag for EditPredictionJumpsFeatureFlag {
    const NAME: &'static str = "edit_prediction_jumps";
}

/// App-global handle to the single [`EditPredictionStore`] entity.
#[derive(Clone)]
struct EditPredictionStoreGlobal(Entity<EditPredictionStore>);

impl Global for EditPredictionStoreGlobal {}

/// Configuration for using the raw Zeta2 endpoint.
/// When set, the client uses the raw endpoint and constructs the prompt itself.
/// The version is also used as the Baseten environment name (lowercased).
#[derive(Clone)]
pub struct Zeta2RawConfig {
    pub model_id: Option<String>,
    pub environment: Option<String>,
    pub format: ZetaFormat,
}
133
/// Central store coordinating edit predictions across all open projects.
///
/// Created once per app via [`EditPredictionStore::global`] and shared through
/// the [`EditPredictionStoreGlobal`] gpui global.
pub struct EditPredictionStore {
    client: Arc<Client>,
    user_store: Entity<UserStore>,
    llm_token: LlmApiToken,
    // Waits for a signed-in user, then fetches the experiment list once.
    _fetch_experiments_task: Task<()>,
    // Per-project state, keyed by the `Project` entity id.
    projects: HashMap<EntityId, ProjectState>,
    update_required: bool,
    edit_prediction_model: EditPredictionModel,
    zeta2_raw_config: Option<Zeta2RawConfig>,
    preferred_experiment: Option<String>,
    available_experiments: Vec<String>,
    pub mercury: Mercury,
    data_collection_choice: DataCollectionChoice,
    // Feeds the background worker that reports rejected predictions
    // (see `handle_rejected_predictions`).
    reject_predictions_tx: mpsc::UnboundedSender<EditPredictionRejectionPayload>,
    // Feeds the settled-predictions worker (see `run_settled_predictions_worker`).
    settled_predictions_tx: mpsc::UnboundedSender<Instant>,
    shown_predictions: VecDeque<EditPrediction>,
    rated_predictions: HashSet<EditPredictionId>,
    #[cfg(test)]
    settled_event_callback: Option<Box<dyn Fn(EditPredictionId, String)>>,
}

/// A rejected prediction plus the organization it should be attributed to.
pub(crate) struct EditPredictionRejectionPayload {
    rejection: EditPredictionRejection,
    organization_id: Option<OrganizationId>,
}

/// Which backing model/endpoint produces predictions.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum EditPredictionModel {
    Zeta,
    Fim { format: EditPredictionPromptFormat },
    Mercury,
}
166
/// Everything a model needs to produce one prediction: the target buffer and
/// position, recent edit events, related-file context, and request metadata.
#[derive(Clone)]
pub struct EditPredictionModelInput {
    project: Entity<Project>,
    buffer: Entity<Buffer>,
    snapshot: BufferSnapshot,
    position: Anchor,
    events: Vec<Arc<zeta_prompt::Event>>,
    related_files: Vec<RelatedFile>,
    trigger: PredictEditsRequestTrigger,
    diagnostic_search_range: Range<Point>,
    // When present, debug events are emitted during the request lifecycle.
    debug_tx: Option<mpsc::UnboundedSender<DebugEvent>>,
    can_collect_data: bool,
    is_open_source: bool,
}

/// Events surfaced to debug/inspection tooling while predictions are produced.
#[derive(Debug)]
pub enum DebugEvent {
    ContextRetrievalStarted(ContextRetrievalStartedDebugEvent),
    ContextRetrievalFinished(ContextRetrievalFinishedDebugEvent),
    EditPredictionStarted(EditPredictionStartedDebugEvent),
    EditPredictionFinished(EditPredictionFinishedDebugEvent),
}
189
/// Emitted when context retrieval begins for a project.
#[derive(Debug)]
pub struct ContextRetrievalStartedDebugEvent {
    pub project_entity_id: EntityId,
    pub timestamp: Instant,
    pub search_prompt: String,
}

/// Emitted when context retrieval completes.
#[derive(Debug)]
pub struct ContextRetrievalFinishedDebugEvent {
    pub project_entity_id: EntityId,
    pub timestamp: Instant,
    /// Free-form key/value pairs describing the retrieval outcome.
    pub metadata: Vec<(&'static str, SharedString)>,
}

/// Emitted when a prediction request is sent.
#[derive(Debug)]
pub struct EditPredictionStartedDebugEvent {
    pub buffer: WeakEntity<Buffer>,
    pub position: Anchor,
    /// The prompt sent to the model, when available.
    pub prompt: Option<String>,
}

/// Emitted when a prediction request finishes.
#[derive(Debug)]
pub struct EditPredictionFinishedDebugEvent {
    pub buffer: WeakEntity<Buffer>,
    pub position: Anchor,
    /// The raw model output, when available.
    pub model_output: Option<String>,
}

/// An event with associated metadata for reconstructing buffer state.
#[derive(Clone)]
pub struct StoredEvent {
    pub event: Arc<zeta_prompt::Event>,
    /// Snapshot of the buffer before this event's edits were applied.
    pub old_snapshot: TextBufferSnapshot,
    /// Version vector of the buffer after this event's edits were applied.
    pub new_snapshot_version: clock::Global,
    /// Range covering all of this event's edits, in post-edit coordinates.
    pub total_edit_range: Range<Anchor>,
}
226
impl StoredEvent {
    /// Whether `self` can be merged with the chronologically-next event
    /// `next_old_event`, given the buffer's latest snapshot and the range of
    /// the most recent edit.
    ///
    /// Merging requires: same buffer, a contiguous snapshot chain, different
    /// sources (one predicted, one manual), both events far from the latest
    /// edit, and the two events close to each other.
    fn can_merge(
        &self,
        next_old_event: &StoredEvent,
        latest_snapshot: &TextBufferSnapshot,
        latest_edit_range: &Range<Anchor>,
    ) -> bool {
        // Events must be for the same buffer and be contiguous across included snapshots to be mergeable.
        if self.old_snapshot.remote_id() != next_old_event.old_snapshot.remote_id() {
            return false;
        }
        if self.old_snapshot.remote_id() != latest_snapshot.remote_id() {
            return false;
        }
        // `self`'s end state must be exactly the next event's start state.
        if self.new_snapshot_version != next_old_event.old_snapshot.version {
            return false;
        }
        // The latest snapshot must already contain everything in the next event.
        if !latest_snapshot
            .version
            .observed_all(&next_old_event.new_snapshot_version)
        {
            return false;
        }

        let a_is_predicted = matches!(
            self.event.as_ref(),
            zeta_prompt::Event::BufferChange {
                predicted: true,
                ..
            }
        );
        let b_is_predicted = matches!(
            next_old_event.event.as_ref(),
            zeta_prompt::Event::BufferChange {
                predicted: true,
                ..
            }
        );

        // If events come from the same source (both predicted or both manual) then
        // we would have coalesced them already.
        if a_is_predicted == b_is_predicted {
            return false;
        }

        let left_range = self.total_edit_range.to_point(latest_snapshot);
        let right_range = next_old_event.total_edit_range.to_point(latest_snapshot);
        let latest_range = latest_edit_range.to_point(latest_snapshot);

        // Events near to the latest edit are not merged if their sources differ.
        if lines_between_ranges(&left_range, &latest_range)
            .min(lines_between_ranges(&right_range, &latest_range))
            <= CHANGE_GROUPING_LINE_SPAN
        {
            return false;
        }

        // Events that are distant from each other are not merged.
        if lines_between_ranges(&left_range, &right_range) > CHANGE_GROUPING_LINE_SPAN {
            return false;
        }

        true
    }
}
292
293fn lines_between_ranges(left: &Range<Point>, right: &Range<Point>) -> u32 {
294 if left.start > right.end {
295 return left.start.row - right.end.row;
296 }
297 if right.start > left.end {
298 return right.start.row - left.end.row;
299 }
300 0
301}
302
/// Per-project bookkeeping for edit predictions.
struct ProjectState {
    /// Finalized edit-history events, oldest first.
    events: VecDeque<StoredEvent>,
    /// The most recent, still-accumulating event (not yet finalized).
    last_event: Option<LastEvent>,
    recent_paths: VecDeque<ProjectPath>,
    registered_buffers: HashMap<gpui::EntityId, RegisteredBuffer>,
    current_prediction: Option<CurrentEditPrediction>,
    next_pending_prediction_id: usize,
    // Bounded: at most two in-flight prediction requests at a time.
    pending_predictions: ArrayVec<PendingPrediction, 2, u8>,
    debug_tx: Option<mpsc::UnboundedSender<DebugEvent>>,
    last_edit_prediction_refresh: Option<(EntityId, Instant)>,
    last_jump_prediction_refresh: Option<(EntityId, Instant)>,
    /// Ids of pending predictions that were cancelled.
    cancelled_predictions: HashSet<usize>,
    context: Entity<RelatedExcerptStore>,
    // One license watcher per worktree, used to decide open-source status.
    license_detection_watchers: HashMap<WorktreeId, Rc<LicenseDetectionWatcher>>,
    _subscriptions: [gpui::Subscription; 2],
    copilot: Option<Entity<Copilot>>,
}
320
impl ProjectState {
    /// Returns the finalized event history, plus the still-pending
    /// `last_event` finalized on the fly (split in two at the last editing
    /// pause, if any).
    pub fn events(&self, cx: &App) -> Vec<StoredEvent> {
        self.events
            .iter()
            .cloned()
            .chain(self.last_event.as_ref().iter().flat_map(|event| {
                let (one, two) = event.split_by_pause();
                let one = one.finalize(&self.license_detection_watchers, cx);
                let two = two.and_then(|two| two.finalize(&self.license_detection_watchers, cx));
                one.into_iter().chain(two)
            }))
            .collect()
    }

    /// Cancels an in-flight prediction. Depending on `drop_on_cancel`, either
    /// aborts the request outright (by dropping its task) or awaits it so the
    /// resulting prediction can be reported as rejected.
    fn cancel_pending_prediction(
        &mut self,
        pending_prediction: PendingPrediction,
        cx: &mut Context<EditPredictionStore>,
    ) {
        self.cancelled_predictions.insert(pending_prediction.id);

        if pending_prediction.drop_on_cancel {
            // Dropping the task cancels the underlying request.
            drop(pending_prediction.task);
        } else {
            cx.spawn(async move |this, cx| {
                let Some(prediction_id) = pending_prediction.task.await else {
                    return;
                };

                this.update(cx, |this, cx| {
                    this.reject_prediction(
                        prediction_id,
                        EditPredictionRejectReason::Canceled,
                        false,
                        None,
                        None,
                        cx,
                    );
                })
                .ok();
            })
            .detach()
        }
    }

    /// The project's currently-active buffer, if registered with the store,
    /// along with the last known cursor position in it.
    fn active_buffer(
        &self,
        project: &Entity<Project>,
        cx: &App,
    ) -> Option<(Entity<Buffer>, Option<Anchor>)> {
        let project = project.read(cx);
        let active_path = project.path_for_entry(project.active_entry()?, cx)?;
        let active_buffer = project.buffer_store().read(cx).get_by_path(&active_path)?;
        let registered_buffer = self.registered_buffers.get(&active_buffer.entity_id())?;
        Some((active_buffer, registered_buffer.last_position))
    }
}
378
/// The prediction currently offered to the user, plus display bookkeeping.
#[derive(Debug, Clone)]
struct CurrentEditPrediction {
    pub requested_by: PredictionRequestedBy,
    pub prediction: EditPrediction,
    /// Whether this prediction has been rendered to the user yet.
    pub was_shown: bool,
    pub shown_with: Option<edit_prediction_types::SuggestionDisplayType>,
    /// End-to-end latency from request to receipt of this prediction.
    pub e2e_latency: std::time::Duration,
}
387
impl CurrentEditPrediction {
    /// Whether `self` (a newly-arrived prediction) should replace the
    /// currently-displayed `old_prediction`.
    ///
    /// A prediction that no longer interpolates against its buffer's current
    /// contents never replaces. Otherwise the new one wins, except in the
    /// single-edit/same-buffer case, where it replaces only if it targets the
    /// same range and merely extends the old text (limits UI flicker).
    fn should_replace_prediction(&self, old_prediction: &Self, cx: &App) -> bool {
        let Some(new_edits) = self
            .prediction
            .interpolate(&self.prediction.buffer.read(cx))
        else {
            return false;
        };

        if self.prediction.buffer != old_prediction.prediction.buffer {
            return true;
        }

        // If the old prediction can no longer be applied, replace it.
        let Some(old_edits) = old_prediction
            .prediction
            .interpolate(&old_prediction.prediction.buffer.read(cx))
        else {
            return true;
        };

        let requested_by_buffer_id = self.requested_by.buffer_id();

        // This reduces the occurrence of UI thrash from replacing edits
        //
        // TODO: This is fairly arbitrary - should have a more general heuristic that handles multiple edits.
        if requested_by_buffer_id == Some(self.prediction.buffer.entity_id())
            && requested_by_buffer_id == Some(old_prediction.prediction.buffer.entity_id())
            && old_edits.len() == 1
            && new_edits.len() == 1
        {
            let (old_range, old_text) = &old_edits[0];
            let (new_range, new_text) = &new_edits[0];
            new_range == old_range && new_text.starts_with(old_text.as_ref())
        } else {
            true
        }
    }
}
426
/// What triggered a prediction request.
#[derive(Debug, Clone)]
enum PredictionRequestedBy {
    /// Triggered by a diagnostics update rather than a specific buffer edit.
    DiagnosticsUpdate,
    /// Triggered from the buffer with this entity id.
    Buffer(EntityId),
}
432
433impl PredictionRequestedBy {
434 pub fn buffer_id(&self) -> Option<EntityId> {
435 match self {
436 PredictionRequestedBy::DiagnosticsUpdate => None,
437 PredictionRequestedBy::Buffer(buffer_id) => Some(*buffer_id),
438 }
439 }
440}
441
// NOTE(review): consumed outside this view; name suggests a line radius used
// when searching diagnostics around the cursor — confirm at use sites.
const DIAGNOSTIC_LINES_RANGE: u32 = 20;

/// Scope of a diagnostic search. Presumably `Local` is near the cursor and
/// `Global` is project-wide — verify against the search implementation.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum DiagnosticSearchScope {
    Local,
    Global,
}
449
/// An in-flight prediction request.
#[derive(Debug)]
struct PendingPrediction {
    id: usize,
    // Resolves to the prediction's id, or `None` if no prediction resulted.
    task: Task<Option<EditPredictionId>>,
    /// If true, the task is dropped immediately on cancel (cancelling the HTTP request).
    /// If false, the task is awaited to completion so rejection can be reported.
    drop_on_cancel: bool,
}

/// A prediction from the perspective of a buffer.
#[derive(Debug)]
enum BufferEditPrediction<'a> {
    /// The prediction applies within this buffer.
    Local { prediction: &'a EditPrediction },
    /// The prediction targets another location; see use sites for jump handling.
    Jump { prediction: &'a EditPrediction },
}
465
#[cfg(test)]
impl std::ops::Deref for BufferEditPrediction<'_> {
    type Target = EditPrediction;

    // Both variants wrap the same reference type, so deref them uniformly.
    fn deref(&self) -> &Self::Target {
        match self {
            BufferEditPrediction::Local { prediction }
            | BufferEditPrediction::Jump { prediction } => prediction,
        }
    }
}
477
/// A shown prediction queued for "settled" tracking (see the
/// settled-predictions worker and the `EDIT_PREDICTION_SETTLED_*` constants).
#[derive(Clone)]

struct PendingSettledPrediction {
    request_id: EditPredictionId,
    editable_anchor_range: Range<Anchor>,
    example: Option<ExampleSpec>,
    enqueued_at: Instant,
    last_edit_at: Instant,
    e2e_latency: std::time::Duration,
}

/// Per-buffer state tracked for each buffer registered with the store.
struct RegisteredBuffer {
    file: Option<Arc<dyn File>>,
    snapshot: TextBufferSnapshot,
    pending_predictions: Vec<PendingSettledPrediction>,
    /// Last known cursor position in this buffer, if any.
    last_position: Option<Anchor>,
    _subscriptions: [gpui::Subscription; 2],
}
496
/// The most recent, still-accumulating buffer-change event. Converted into a
/// [`StoredEvent`] by [`LastEvent::finalize`].
#[derive(Clone)]
struct LastEvent {
    /// Buffer state before the first edit in this event.
    old_snapshot: TextBufferSnapshot,
    /// Buffer state after the most recent edit in this event.
    new_snapshot: TextBufferSnapshot,
    old_file: Option<Arc<dyn File>>,
    new_file: Option<Arc<dyn File>>,
    /// Range of the most recent edit, in `new_snapshot` coordinates.
    latest_edit_range: Range<Anchor>,
    /// Range covering all edits in this event, in `new_snapshot` coordinates.
    total_edit_range: Range<Anchor>,
    total_edit_range_at_last_pause_boundary: Option<Range<Anchor>>,
    /// Whether these edits came from applying a prediction.
    predicted: bool,
    /// Snapshot taken at the last pause in editing; enables `split_by_pause`.
    snapshot_after_last_editing_pause: Option<TextBufferSnapshot>,
    last_edit_time: Option<Instant>,
}
510
impl LastEvent {
    /// Converts this in-progress event into a [`StoredEvent`] by diffing the
    /// old and new snapshots around the edited range. Returns `None` when the
    /// path is unchanged and the diff is empty (nothing worth recording), or
    /// when the edited region exceeds the diff size limit.
    pub fn finalize(
        &self,
        license_detection_watchers: &HashMap<WorktreeId, Rc<LicenseDetectionWatcher>>,
        cx: &App,
    ) -> Option<StoredEvent> {
        let path = buffer_path_with_id_fallback(self.new_file.as_ref(), &self.new_snapshot, cx);
        let old_path = buffer_path_with_id_fallback(self.old_file.as_ref(), &self.old_snapshot, cx);

        // Open source only when both the old and new files live in worktrees
        // whose license watcher says the project is open source.
        let in_open_source_repo =
            [self.new_file.as_ref(), self.old_file.as_ref()]
                .iter()
                .all(|file| {
                    file.is_some_and(|file| {
                        license_detection_watchers
                            .get(&file.worktree_id(cx))
                            .is_some_and(|watcher| watcher.is_project_open_source())
                    })
                });

        let (diff, edit_range) = compute_diff_between_snapshots_in_range(
            &self.old_snapshot,
            &self.new_snapshot,
            &self.total_edit_range,
        )?;

        if path == old_path && diff.is_empty() {
            None
        } else {
            Some(StoredEvent {
                event: Arc::new(zeta_prompt::Event::BufferChange {
                    old_path,
                    path,
                    diff,
                    in_open_source_repo,
                    predicted: self.predicted,
                }),
                old_snapshot: self.old_snapshot.clone(),
                new_snapshot_version: self.new_snapshot.version.clone(),
                total_edit_range: self.new_snapshot.anchor_before(edit_range.start)
                    ..self.new_snapshot.anchor_before(edit_range.end),
            })
        }
    }

    /// Splits this event in two at the last editing pause, so edits made
    /// before and after the pause become separate events. Returns
    /// `(self, None)` when there was no pause or the post-pause edit range
    /// cannot be computed.
    pub fn split_by_pause(&self) -> (LastEvent, Option<LastEvent>) {
        let Some(boundary_snapshot) = self.snapshot_after_last_editing_pause.as_ref() else {
            return (self.clone(), None);
        };

        let total_edit_range_before_pause = self
            .total_edit_range_at_last_pause_boundary
            .clone()
            .unwrap_or_else(|| self.total_edit_range.clone());

        let Some(total_edit_range_after_pause) =
            compute_total_edit_range_between_snapshots(boundary_snapshot, &self.new_snapshot)
        else {
            return (self.clone(), None);
        };

        let latest_edit_range_before_pause = total_edit_range_before_pause.clone();
        let latest_edit_range_after_pause = total_edit_range_after_pause.clone();

        // The pre-pause half runs from the original old snapshot to the
        // boundary snapshot.
        let before = LastEvent {
            old_snapshot: self.old_snapshot.clone(),
            new_snapshot: boundary_snapshot.clone(),
            old_file: self.old_file.clone(),
            new_file: self.new_file.clone(),
            latest_edit_range: latest_edit_range_before_pause,
            total_edit_range: total_edit_range_before_pause,
            total_edit_range_at_last_pause_boundary: None,
            predicted: self.predicted,
            snapshot_after_last_editing_pause: None,
            last_edit_time: self.last_edit_time,
        };

        // The post-pause half runs from the boundary snapshot to the current one.
        let after = LastEvent {
            old_snapshot: boundary_snapshot.clone(),
            new_snapshot: self.new_snapshot.clone(),
            old_file: self.old_file.clone(),
            new_file: self.new_file.clone(),
            latest_edit_range: latest_edit_range_after_pause,
            total_edit_range: total_edit_range_after_pause,
            total_edit_range_at_last_pause_boundary: None,
            predicted: self.predicted,
            snapshot_after_last_editing_pause: None,
            last_edit_time: self.last_edit_time,
        };

        (before, Some(after))
    }
}
604
605fn compute_total_edit_range_between_snapshots(
606 old_snapshot: &TextBufferSnapshot,
607 new_snapshot: &TextBufferSnapshot,
608) -> Option<Range<Anchor>> {
609 let edits: Vec<Edit<usize>> = new_snapshot
610 .edits_since::<usize>(&old_snapshot.version)
611 .collect();
612
613 let (first_edit, last_edit) = edits.first().zip(edits.last())?;
614 let new_start_point = new_snapshot.offset_to_point(first_edit.new.start);
615 let new_end_point = new_snapshot.offset_to_point(last_edit.new.end);
616
617 Some(new_snapshot.anchor_before(new_start_point)..new_snapshot.anchor_before(new_end_point))
618}
619
/// Maps `total_edit_range` (a range in `new_snapshot`) back to the
/// corresponding point range in `old_snapshot` by walking the edits made
/// between the two snapshots. Returns `None` on offset-arithmetic overflow.
fn compute_old_range_for_new_range(
    old_snapshot: &TextBufferSnapshot,
    new_snapshot: &TextBufferSnapshot,
    total_edit_range: &Range<Anchor>,
) -> Option<Range<Point>> {
    let new_start_offset = total_edit_range.start.to_offset(new_snapshot);
    let new_end_offset = total_edit_range.end.to_offset(new_snapshot);

    let edits: Vec<Edit<usize>> = new_snapshot
        .edits_since::<usize>(&old_snapshot.version)
        .collect();
    let mut old_start_offset = None;
    let mut old_end_offset = None;
    // Running new-minus-old offset difference accumulated over edits that
    // precede the current position.
    let mut delta: isize = 0;

    for edit in &edits {
        if old_start_offset.is_none() && new_start_offset <= edit.new.end {
            old_start_offset = Some(if new_start_offset < edit.new.start {
                // Position falls before this edit: undo the preceding delta.
                new_start_offset.checked_add_signed(-delta)?
            } else {
                // Position falls inside this edit: clamp to the edit's old start.
                edit.old.start
            });
        }

        if old_end_offset.is_none() && new_end_offset <= edit.new.end {
            old_end_offset = Some(if new_end_offset < edit.new.start {
                new_end_offset.checked_add_signed(-delta)?
            } else {
                // Inside the edit: clamp to the edit's old end.
                edit.old.end
            });
        }

        delta += edit.new.len() as isize - edit.old.len() as isize;
    }

    // Positions after the final edit: undo the total accumulated delta.
    let old_start_offset =
        old_start_offset.unwrap_or_else(|| new_start_offset.saturating_add_signed(-delta));
    let old_end_offset =
        old_end_offset.unwrap_or_else(|| new_end_offset.saturating_add_signed(-delta));

    Some(
        old_snapshot.offset_to_point(old_start_offset)
            ..old_snapshot.offset_to_point(old_end_offset),
    )
}
665
/// Produces a unified diff between the two snapshots, restricted to
/// `total_edit_range` plus a few lines of context on each side.
///
/// Returns the diff text and the edited point range in the new snapshot, or
/// `None` when the old range cannot be computed or either side of the region
/// exceeds `EDIT_HISTORY_DIFF_SIZE_LIMIT`.
fn compute_diff_between_snapshots_in_range(
    old_snapshot: &TextBufferSnapshot,
    new_snapshot: &TextBufferSnapshot,
    total_edit_range: &Range<Anchor>,
) -> Option<(String, Range<Point>)> {
    let new_start_point = total_edit_range.start.to_point(new_snapshot);
    let new_end_point = total_edit_range.end.to_point(new_snapshot);
    let old_range = compute_old_range_for_new_range(old_snapshot, new_snapshot, total_edit_range)?;
    let old_start_point = old_range.start;
    let old_end_point = old_range.end;

    // Unchanged lines included on either side of the edited region.
    const CONTEXT_LINES: u32 = 3;

    let old_context_start_row = old_start_point.row.saturating_sub(CONTEXT_LINES);
    let new_context_start_row = new_start_point.row.saturating_sub(CONTEXT_LINES);
    let old_context_end_row =
        (old_end_point.row + 1 + CONTEXT_LINES).min(old_snapshot.max_point().row);
    let new_context_end_row =
        (new_end_point.row + 1 + CONTEXT_LINES).min(new_snapshot.max_point().row);

    // Convert the context rows to offsets at line boundaries, clamped to the
    // end of each buffer.
    let old_start_line_offset = old_snapshot.point_to_offset(Point::new(old_context_start_row, 0));
    let new_start_line_offset = new_snapshot.point_to_offset(Point::new(new_context_start_row, 0));
    let old_end_line_offset = old_snapshot
        .point_to_offset(Point::new(old_context_end_row + 1, 0).min(old_snapshot.max_point()));
    let new_end_line_offset = new_snapshot
        .point_to_offset(Point::new(new_context_end_row + 1, 0).min(new_snapshot.max_point()));
    let old_edit_range = old_start_line_offset..old_end_line_offset;
    let new_edit_range = new_start_line_offset..new_end_line_offset;

    // Skip events whose diff would blow the prompt budget.
    if new_edit_range.len() > EDIT_HISTORY_DIFF_SIZE_LIMIT
        || old_edit_range.len() > EDIT_HISTORY_DIFF_SIZE_LIMIT
    {
        return None;
    }

    let old_region_text: String = old_snapshot.text_for_range(old_edit_range).collect();
    let new_region_text: String = new_snapshot.text_for_range(new_edit_range).collect();

    let diff = language::unified_diff_with_offsets(
        &old_region_text,
        &new_region_text,
        old_context_start_row,
        new_context_start_row,
    );

    Some((diff, new_start_point..new_end_point))
}
713
714fn buffer_path_with_id_fallback(
715 file: Option<&Arc<dyn File>>,
716 snapshot: &TextBufferSnapshot,
717 cx: &App,
718) -> Arc<Path> {
719 if let Some(file) = file {
720 file.full_path(cx).into()
721 } else {
722 Path::new(&format!("untitled-{}", snapshot.remote_id())).into()
723 }
724}
725
726impl EditPredictionStore {
727 pub fn try_global(cx: &App) -> Option<Entity<Self>> {
728 cx.try_global::<EditPredictionStoreGlobal>()
729 .map(|global| global.0.clone())
730 }
731
732 pub fn global(
733 client: &Arc<Client>,
734 user_store: &Entity<UserStore>,
735 cx: &mut App,
736 ) -> Entity<Self> {
737 cx.try_global::<EditPredictionStoreGlobal>()
738 .map(|global| global.0.clone())
739 .unwrap_or_else(|| {
740 let ep_store = cx.new(|cx| Self::new(client.clone(), user_store.clone(), cx));
741 cx.set_global(EditPredictionStoreGlobal(ep_store.clone()));
742 ep_store
743 })
744 }
745
746 pub fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
747 let data_collection_choice = Self::load_data_collection_choice(cx);
748
749 let llm_token = LlmApiToken::global(cx);
750
751 let (reject_tx, reject_rx) = mpsc::unbounded();
752 cx.background_spawn({
753 let client = client.clone();
754 let llm_token = llm_token.clone();
755 let app_version = AppVersion::global(cx);
756 let background_executor = cx.background_executor().clone();
757 async move {
758 Self::handle_rejected_predictions(
759 reject_rx,
760 client,
761 llm_token,
762 app_version,
763 background_executor,
764 )
765 .await
766 }
767 })
768 .detach();
769
770 let (settled_predictions_tx, settled_predictions_rx) = mpsc::unbounded();
771 cx.spawn(async move |this, cx| {
772 Self::run_settled_predictions_worker(this, settled_predictions_rx, cx).await;
773 })
774 .detach();
775
776 let mut current_user = user_store.read(cx).watch_current_user();
777 let fetch_experiments_task = cx.spawn(async move |this, cx| {
778 while current_user.borrow().is_none() {
779 current_user.next().await;
780 }
781 this.update(cx, |this, cx| {
782 this.refresh_available_experiments(cx);
783 })
784 .log_err();
785 });
786
787 let this = Self {
788 projects: HashMap::default(),
789 client,
790 user_store,
791 llm_token,
792 _fetch_experiments_task: fetch_experiments_task,
793 update_required: false,
794 edit_prediction_model: EditPredictionModel::Zeta,
795 zeta2_raw_config: Self::zeta2_raw_config_from_env(),
796 preferred_experiment: None,
797 available_experiments: Vec::new(),
798 mercury: Mercury::new(cx),
799
800 data_collection_choice,
801 reject_predictions_tx: reject_tx,
802 settled_predictions_tx,
803 rated_predictions: Default::default(),
804 shown_predictions: Default::default(),
805 #[cfg(test)]
806 settled_event_callback: None,
807 };
808
809 this
810 }
811
812 fn zeta2_raw_config_from_env() -> Option<Zeta2RawConfig> {
813 let version_str = env::var("ZED_ZETA_FORMAT").ok()?;
814 let format = ZetaFormat::parse(&version_str).ok()?;
815 let model_id = env::var("ZED_ZETA_MODEL").ok();
816 let environment = env::var("ZED_ZETA_ENVIRONMENT").ok();
817 Some(Zeta2RawConfig {
818 model_id,
819 environment,
820 format,
821 })
822 }
823
    /// Selects which backing model produces predictions.
    pub fn set_edit_prediction_model(&mut self, model: EditPredictionModel) {
        self.edit_prediction_model = model;
    }

    /// Overrides the raw Zeta2 endpoint configuration.
    pub fn set_zeta2_raw_config(&mut self, config: Zeta2RawConfig) {
        self.zeta2_raw_config = Some(config);
    }

    pub fn zeta2_raw_config(&self) -> Option<&Zeta2RawConfig> {
        self.zeta2_raw_config.as_ref()
    }

    /// The user's explicitly-preferred experiment, if any.
    pub fn preferred_experiment(&self) -> Option<&str> {
        self.preferred_experiment.as_deref()
    }

    pub fn set_preferred_experiment(&mut self, experiment: Option<String>) {
        self.preferred_experiment = experiment;
    }

    /// Experiments reported by the server (see `refresh_available_experiments`).
    pub fn available_experiments(&self) -> &[String] {
        &self.available_experiments
    }
847
848 pub fn active_experiment(&self) -> Option<&str> {
849 self.preferred_experiment.as_deref().or_else(|| {
850 self.shown_predictions
851 .iter()
852 .find_map(|p| p.model_version.as_ref())
853 .and_then(|model_version| model_version.strip_prefix("zeta2:"))
854 })
855 }
856
    /// Fetches the list of edit-prediction experiments from the LLM service's
    /// `/edit_prediction_experiments` endpoint and stores it in
    /// `available_experiments`, notifying observers. Errors are logged.
    pub fn refresh_available_experiments(&mut self, cx: &mut Context<Self>) {
        let client = self.client.clone();
        let llm_token = self.llm_token.clone();
        let app_version = AppVersion::global(cx);
        let organization_id = self
            .user_store
            .read(cx)
            .current_organization()
            .map(|organization| organization.id.clone());

        cx.spawn(async move |this, cx| {
            // Perform the HTTP round trip off the main thread.
            let experiments = cx
                .background_spawn(async move {
                    let http_client = client.http_client();
                    let token = llm_token.acquire(&client, organization_id).await?;
                    let url = http_client.build_zed_llm_url("/edit_prediction_experiments", &[])?;
                    let request = http_client::Request::builder()
                        .method(Method::GET)
                        .uri(url.as_ref())
                        .header("Authorization", format!("Bearer {}", token))
                        .header(ZED_VERSION_HEADER_NAME, app_version.to_string())
                        .body(Default::default())?;
                    let mut response = http_client.send(request).await?;
                    if response.status().is_success() {
                        let mut body = Vec::new();
                        response.body_mut().read_to_end(&mut body).await?;
                        let experiments: Vec<String> = serde_json::from_slice(&body)?;
                        Ok(experiments)
                    } else {
                        // Include the response body in the error for diagnosis.
                        let mut body = String::new();
                        response.body_mut().read_to_string(&mut body).await?;
                        anyhow::bail!(
                            "Failed to fetch experiments: {:?}\nBody: {}",
                            response.status(),
                            body
                        );
                    }
                })
                .await?;
            this.update(cx, |this, cx| {
                this.available_experiments = experiments;
                cx.notify();
            })?;
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);
    }
904
    /// Icon set shown in the UI for the active prediction model/provider.
    pub fn icons(&self, cx: &App) -> edit_prediction_types::EditPredictionIconSet {
        use ui::IconName;
        match self.edit_prediction_model {
            EditPredictionModel::Mercury => {
                edit_prediction_types::EditPredictionIconSet::new(IconName::Inception)
            }
            EditPredictionModel::Zeta => {
                edit_prediction_types::EditPredictionIconSet::new(IconName::ZedPredict)
                    .with_disabled(IconName::ZedPredictDisabled)
                    .with_up(IconName::ZedPredictUp)
                    .with_down(IconName::ZedPredictDown)
                    .with_error(IconName::ZedPredictError)
            }
            EditPredictionModel::Fim { .. } => {
                // FIM goes through a configurable provider; pick the icon by provider.
                let settings = &all_language_settings(None, cx).edit_predictions;
                match settings.provider {
                    EditPredictionProvider::Ollama => {
                        edit_prediction_types::EditPredictionIconSet::new(IconName::AiOllama)
                    }
                    _ => {
                        edit_prediction_types::EditPredictionIconSet::new(IconName::AiOpenAiCompat)
                    }
                }
            }
        }
    }
931
    /// Whether a Mercury API token is configured.
    pub fn has_mercury_api_token(&self, cx: &App) -> bool {
        self.mercury.api_token.read(cx).has_key()
    }

    /// Whether the Mercury backend has reported a payment-required error.
    pub fn mercury_has_payment_required_error(&self) -> bool {
        self.mercury.has_payment_required_error()
    }
939
940 pub fn clear_history(&mut self) {
941 for project_state in self.projects.values_mut() {
942 project_state.events.clear();
943 project_state.last_event.take();
944 }
945 }
946
947 pub fn clear_history_for_project(&mut self, project: &Entity<Project>) {
948 if let Some(project_state) = self.projects.get_mut(&project.entity_id()) {
949 project_state.events.clear();
950 project_state.last_event.take();
951 }
952 }
953
954 pub fn edit_history_for_project(
955 &self,
956 project: &Entity<Project>,
957 cx: &App,
958 ) -> Vec<StoredEvent> {
959 self.projects
960 .get(&project.entity_id())
961 .map(|project_state| project_state.events(cx))
962 .unwrap_or_default()
963 }
964
    /// Returns the related files currently tracked for `project`, with each
    /// file's `in_open_source_repo` flag resolved via `is_file_open_source`.
    /// Untracked projects yield an empty list.
    pub fn context_for_project<'a>(
        &'a self,
        project: &Entity<Project>,
        cx: &'a mut App,
    ) -> Vec<RelatedFile> {
        self.projects
            .get(&project.entity_id())
            .map(|project_state| {
                project_state.context.update(cx, |context, cx| {
                    context
                        .related_files_with_buffers(cx)
                        .map(|(mut related_file, buffer)| {
                            // Buffers without a backing file are treated as closed source.
                            related_file.in_open_source_repo = buffer
                                .read(cx)
                                .file()
                                .map_or(false, |file| self.is_file_open_source(&project, file, cx));
                            related_file
                        })
                        .collect()
                })
            })
            .unwrap_or_default()
    }
988
989 pub fn copilot_for_project(&self, project: &Entity<Project>) -> Option<Entity<Copilot>> {
990 self.projects
991 .get(&project.entity_id())
992 .and_then(|project| project.copilot.clone())
993 }
994
995 pub fn start_copilot_for_project(
996 &mut self,
997 project: &Entity<Project>,
998 cx: &mut Context<Self>,
999 ) -> Option<Entity<Copilot>> {
1000 if DisableAiSettings::get(None, cx).disable_ai {
1001 return None;
1002 }
1003 let state = self.get_or_init_project(project, cx);
1004
1005 if state.copilot.is_some() {
1006 return state.copilot.clone();
1007 }
1008 let _project = project.clone();
1009 let project = project.read(cx);
1010
1011 let node = project.node_runtime().cloned();
1012 if let Some(node) = node {
1013 let next_id = project.languages().next_language_server_id();
1014 let fs = project.fs().clone();
1015
1016 let copilot = cx.new(|cx| Copilot::new(Some(_project), next_id, fs, node, cx));
1017 state.copilot = Some(copilot.clone());
1018 Some(copilot)
1019 } else {
1020 None
1021 }
1022 }
1023
1024 pub fn context_for_project_with_buffers<'a>(
1025 &'a self,
1026 project: &Entity<Project>,
1027 cx: &'a mut App,
1028 ) -> Vec<(RelatedFile, Entity<Buffer>)> {
1029 self.projects
1030 .get(&project.entity_id())
1031 .map(|project| {
1032 project.context.update(cx, |context, cx| {
1033 context.related_files_with_buffers(cx).collect()
1034 })
1035 })
1036 .unwrap_or_default()
1037 }
1038
1039 pub fn usage(&self, cx: &App) -> Option<EditPredictionUsage> {
1040 if matches!(self.edit_prediction_model, EditPredictionModel::Zeta) {
1041 self.user_store.read(cx).edit_prediction_usage()
1042 } else {
1043 None
1044 }
1045 }
1046
    /// Ensures per-project state (context store, subscriptions, history)
    /// exists for `project`.
    pub fn register_project(&mut self, project: &Entity<Project>, cx: &mut Context<Self>) {
        self.get_or_init_project(project, cx);
    }
1050
    /// Registers `buffer` for edit tracking within `project`, initializing the
    /// project's state if needed.
    pub fn register_buffer(
        &mut self,
        buffer: &Entity<Buffer>,
        project: &Entity<Project>,
        cx: &mut Context<Self>,
    ) {
        let project_state = self.get_or_init_project(project, cx);
        Self::register_buffer_impl(project_state, buffer, project, cx);
    }
1060
1061 fn get_or_init_project(
1062 &mut self,
1063 project: &Entity<Project>,
1064 cx: &mut Context<Self>,
1065 ) -> &mut ProjectState {
1066 let entity_id = project.entity_id();
1067 self.projects
1068 .entry(entity_id)
1069 .or_insert_with(|| ProjectState {
1070 context: {
1071 let related_excerpt_store = cx.new(|cx| RelatedExcerptStore::new(project, cx));
1072 cx.subscribe(&related_excerpt_store, move |this, _, event, _| {
1073 this.handle_excerpt_store_event(entity_id, event);
1074 })
1075 .detach();
1076 related_excerpt_store
1077 },
1078 events: VecDeque::new(),
1079 last_event: None,
1080 recent_paths: VecDeque::new(),
1081 debug_tx: None,
1082 registered_buffers: HashMap::default(),
1083 current_prediction: None,
1084 cancelled_predictions: HashSet::default(),
1085 pending_predictions: ArrayVec::new(),
1086 next_pending_prediction_id: 0,
1087 last_edit_prediction_refresh: None,
1088 last_jump_prediction_refresh: None,
1089 license_detection_watchers: HashMap::default(),
1090 _subscriptions: [
1091 cx.subscribe(&project, Self::handle_project_event),
1092 cx.observe_release(&project, move |this, _, cx| {
1093 this.projects.remove(&entity_id);
1094 cx.notify();
1095 }),
1096 ],
1097 copilot: None,
1098 })
1099 }
1100
    /// Drops all tracked state for `project` (history, predictions,
    /// subscriptions, watchers).
    pub fn remove_project(&mut self, project: &Entity<Project>) {
        self.projects.remove(&project.entity_id());
    }
1104
1105 fn handle_excerpt_store_event(
1106 &mut self,
1107 project_entity_id: EntityId,
1108 event: &RelatedExcerptStoreEvent,
1109 ) {
1110 if let Some(project_state) = self.projects.get(&project_entity_id) {
1111 if let Some(debug_tx) = project_state.debug_tx.clone() {
1112 match event {
1113 RelatedExcerptStoreEvent::StartedRefresh => {
1114 debug_tx
1115 .unbounded_send(DebugEvent::ContextRetrievalStarted(
1116 ContextRetrievalStartedDebugEvent {
1117 project_entity_id: project_entity_id,
1118 timestamp: Instant::now(),
1119 search_prompt: String::new(),
1120 },
1121 ))
1122 .ok();
1123 }
1124 RelatedExcerptStoreEvent::FinishedRefresh {
1125 cache_hit_count,
1126 cache_miss_count,
1127 mean_definition_latency,
1128 max_definition_latency,
1129 } => {
1130 debug_tx
1131 .unbounded_send(DebugEvent::ContextRetrievalFinished(
1132 ContextRetrievalFinishedDebugEvent {
1133 project_entity_id: project_entity_id,
1134 timestamp: Instant::now(),
1135 metadata: vec![
1136 (
1137 "Cache Hits",
1138 format!(
1139 "{}/{}",
1140 cache_hit_count,
1141 cache_hit_count + cache_miss_count
1142 )
1143 .into(),
1144 ),
1145 (
1146 "Max LSP Time",
1147 format!("{} ms", max_definition_latency.as_millis())
1148 .into(),
1149 ),
1150 (
1151 "Mean LSP Time",
1152 format!("{} ms", mean_definition_latency.as_millis())
1153 .into(),
1154 ),
1155 ],
1156 },
1157 ))
1158 .ok();
1159 }
1160 }
1161 }
1162 }
1163 }
1164
1165 pub fn debug_info(
1166 &mut self,
1167 project: &Entity<Project>,
1168 cx: &mut Context<Self>,
1169 ) -> mpsc::UnboundedReceiver<DebugEvent> {
1170 let project_state = self.get_or_init_project(project, cx);
1171 let (debug_watch_tx, debug_watch_rx) = mpsc::unbounded();
1172 project_state.debug_tx = Some(debug_watch_tx);
1173 debug_watch_rx
1174 }
1175
1176 fn handle_project_event(
1177 &mut self,
1178 project: Entity<Project>,
1179 event: &project::Event,
1180 cx: &mut Context<Self>,
1181 ) {
1182 if !is_ep_store_provider(all_language_settings(None, cx).edit_predictions.provider) {
1183 return;
1184 }
1185 // TODO [zeta2] init with recent paths
1186 match event {
1187 project::Event::ActiveEntryChanged(Some(active_entry_id)) => {
1188 let Some(project_state) = self.projects.get_mut(&project.entity_id()) else {
1189 return;
1190 };
1191 let path = project.read(cx).path_for_entry(*active_entry_id, cx);
1192 if let Some(path) = path {
1193 if let Some(ix) = project_state
1194 .recent_paths
1195 .iter()
1196 .position(|probe| probe == &path)
1197 {
1198 project_state.recent_paths.remove(ix);
1199 }
1200 project_state.recent_paths.push_front(path);
1201 }
1202 }
1203 project::Event::DiagnosticsUpdated { .. } => {
1204 if cx.has_flag::<EditPredictionJumpsFeatureFlag>() {
1205 self.refresh_prediction_from_diagnostics(
1206 project,
1207 DiagnosticSearchScope::Global,
1208 cx,
1209 );
1210 }
1211 }
1212 _ => (),
1213 }
1214 }
1215
    /// Ensures `buffer` is tracked in `project_state` and returns its
    /// registration entry.
    ///
    /// Side effects on first registration:
    /// - lazily installs a `LicenseDetectionWatcher` for the buffer's worktree
    ///   (removed when the worktree is released),
    /// - subscribes to buffer edits so changes feed the edit history,
    /// - removes the registration when the buffer entity is released.
    fn register_buffer_impl<'a>(
        project_state: &'a mut ProjectState,
        buffer: &Entity<Buffer>,
        project: &Entity<Project>,
        cx: &mut Context<Self>,
    ) -> &'a mut RegisteredBuffer {
        let buffer_id = buffer.entity_id();

        if let Some(file) = buffer.read(cx).file() {
            let worktree_id = file.worktree_id(cx);
            if let Some(worktree) = project.read(cx).worktree_for_id(worktree_id, cx) {
                project_state
                    .license_detection_watchers
                    .entry(worktree_id)
                    .or_insert_with(|| {
                        let project_entity_id = project.entity_id();
                        // Clean up the watcher when its worktree goes away.
                        cx.observe_release(&worktree, move |this, _worktree, _cx| {
                            let Some(project_state) = this.projects.get_mut(&project_entity_id)
                            else {
                                return;
                            };
                            project_state
                                .license_detection_watchers
                                .remove(&worktree_id);
                        })
                        .detach();
                        Rc::new(LicenseDetectionWatcher::new(&worktree, cx))
                    });
            }
        }

        match project_state.registered_buffers.entry(buffer_id) {
            hash_map::Entry::Occupied(entry) => entry.into_mut(),
            hash_map::Entry::Vacant(entry) => {
                let buf = buffer.read(cx);
                let snapshot = buf.text_snapshot();
                let file = buf.file().cloned();
                let project_entity_id = project.entity_id();
                entry.insert(RegisteredBuffer {
                    snapshot,
                    file,
                    last_position: None,
                    pending_predictions: Vec::new(),
                    _subscriptions: [
                        // Feed edits into the per-project change history.
                        cx.subscribe(buffer, {
                            let project = project.downgrade();
                            move |this, buffer, event, cx| {
                                if let language::BufferEvent::Edited { is_local } = event
                                    && let Some(project) = project.upgrade()
                                {
                                    this.report_changes_for_buffer(
                                        &buffer, &project, false, *is_local, cx,
                                    );
                                }
                            }
                        }),
                        // Drop the registration when the buffer is released.
                        cx.observe_release(buffer, move |this, _buffer, _cx| {
                            let Some(project_state) = this.projects.get_mut(&project_entity_id)
                            else {
                                return;
                            };
                            project_state.registered_buffers.remove(&buffer_id);
                        }),
                    ],
                })
            }
        }
    }
1284
    /// Ingests a buffer change into the per-project edit history.
    ///
    /// `is_predicted` marks edits that came from applying a prediction;
    /// `is_local` distinguishes the user's own edits from collaborators'.
    /// Nearby consecutive edits to the same buffer are coalesced into a single
    /// in-progress event (`last_event`); edits that don't coalesce finalize
    /// the previous event into the bounded `events` queue.
    fn report_changes_for_buffer(
        &mut self,
        buffer: &Entity<Buffer>,
        project: &Entity<Project>,
        is_predicted: bool,
        is_local: bool,
        cx: &mut Context<Self>,
    ) {
        let project_state = self.get_or_init_project(project, cx);
        let registered_buffer = Self::register_buffer_impl(project_state, buffer, project, cx);

        let buf = buffer.read(cx);
        let new_file = buf.file().cloned();
        let new_snapshot = buf.text_snapshot();
        // No-op if we've already recorded this buffer version.
        if new_snapshot.version == registered_buffer.snapshot.version {
            return;
        }
        let old_file = mem::replace(&mut registered_buffer.file, new_file.clone());
        let old_snapshot = mem::replace(&mut registered_buffer.snapshot, new_snapshot.clone());
        let mut edit_range: Option<Range<Anchor>> = None;
        let now = cx.background_executor().now();

        // Envelope (first start .. last end) of all edits since the last
        // recorded snapshot.
        for (_edit, anchor_range) in
            new_snapshot.anchored_edits_since::<usize>(&old_snapshot.version)
        {
            edit_range = Some(match edit_range {
                None => anchor_range,
                Some(acc) => acc.start..anchor_range.end,
            });
        }

        let Some(edit_range) = edit_range else {
            return;
        };

        // Edits overlapping a pending "settled" region reset its quiescence
        // timer (see `run_settled_predictions_worker`).
        for pending_prediction in &mut registered_buffer.pending_predictions {
            if edit_range.overlaps(&pending_prediction.editable_anchor_range, &new_snapshot) {
                pending_prediction.last_edit_at = now;
            }
        }

        // Remote edits only count when they land near local activity — see
        // `collaborator_edit_overlaps_locality_region` for the exact rule.
        let include_in_history = is_local
            || collaborator_edit_overlaps_locality_region(
                project_state,
                project,
                buffer,
                &buf.snapshot(),
                &edit_range,
                cx,
            );

        if !include_in_history {
            return;
        }

        // `None` diff means the change produced no recordable text difference
        // in the edited range.
        let is_recordable_history_edit =
            compute_diff_between_snapshots_in_range(&old_snapshot, &new_snapshot, &edit_range)
                .is_some();

        let events = &mut project_state.events;

        if !is_recordable_history_edit {
            // Nothing to record, but a non-recordable edit still ends any
            // in-progress event: finalize it into the bounded queue.
            if let Some(event) = project_state.last_event.take() {
                if let Some(event) = event.finalize(&project_state.license_detection_watchers, cx) {
                    // NOTE(review): this caps the queue at EVENT_COUNT_MAX - 1
                    // entries after the push — confirm the off-by-one is intended.
                    if events.len() + 1 >= EVENT_COUNT_MAX {
                        events.pop_front();
                    }
                    events.push_back(event);
                }
            }
            return;
        }

        if let Some(last_event) = project_state.last_event.as_mut() {
            // Coalesce only if this edit continues directly from the last
            // event's resulting snapshot of the same buffer.
            let is_next_snapshot_of_same_buffer = old_snapshot.remote_id()
                == last_event.new_snapshot.remote_id()
                && old_snapshot.version == last_event.new_snapshot.version;

            // Don't merge user edits with prediction-applied edits.
            let prediction_source_changed = is_predicted != last_event.predicted;

            let should_coalesce = is_next_snapshot_of_same_buffer
                && !prediction_source_changed
                && lines_between_ranges(
                    &edit_range.to_point(&new_snapshot),
                    &last_event.latest_edit_range.to_point(&new_snapshot),
                ) <= CHANGE_GROUPING_LINE_SPAN;

            if should_coalesce {
                // If the user paused long enough, remember the pre-pause
                // snapshot/range as an intermediate boundary within the event.
                let pause_elapsed = last_event
                    .last_edit_time
                    .map(|t| now.duration_since(t) >= LAST_CHANGE_GROUPING_TIME)
                    .unwrap_or(false);
                if pause_elapsed {
                    last_event.snapshot_after_last_editing_pause =
                        Some(last_event.new_snapshot.clone());
                    last_event.total_edit_range_at_last_pause_boundary =
                        Some(last_event.total_edit_range.clone());
                }

                last_event.latest_edit_range = edit_range.clone();
                last_event.total_edit_range =
                    merge_anchor_ranges(&last_event.total_edit_range, &edit_range, &new_snapshot);
                last_event.new_snapshot = new_snapshot;
                last_event.last_edit_time = Some(now);
                return;
            }
        }

        // Couldn't coalesce: finalize the previous event before starting a
        // new one.
        if let Some(event) = project_state.last_event.take() {
            if let Some(event) = event.finalize(&project_state.license_detection_watchers, cx) {
                if events.len() + 1 >= EVENT_COUNT_MAX {
                    events.pop_front();
                }
                events.push_back(event);
            }
        }

        merge_trailing_events_if_needed(events, &old_snapshot, &new_snapshot, &edit_range);

        project_state.last_event = Some(LastEvent {
            old_file,
            new_file,
            old_snapshot,
            new_snapshot,
            latest_edit_range: edit_range.clone(),
            total_edit_range: edit_range,
            total_edit_range_at_last_pause_boundary: None,
            predicted: is_predicted,
            snapshot_after_last_editing_pause: None,
            last_edit_time: Some(now),
        });
    }
1417
1418 fn prediction_at(
1419 &mut self,
1420 buffer: &Entity<Buffer>,
1421 position: Option<language::Anchor>,
1422 project: &Entity<Project>,
1423 cx: &App,
1424 ) -> Option<BufferEditPrediction<'_>> {
1425 let project_state = self.projects.get_mut(&project.entity_id())?;
1426 if let Some(position) = position
1427 && let Some(buffer) = project_state
1428 .registered_buffers
1429 .get_mut(&buffer.entity_id())
1430 {
1431 buffer.last_position = Some(position);
1432 }
1433
1434 let CurrentEditPrediction {
1435 requested_by,
1436 prediction,
1437 ..
1438 } = project_state.current_prediction.as_ref()?;
1439
1440 if prediction.targets_buffer(buffer.read(cx)) {
1441 Some(BufferEditPrediction::Local { prediction })
1442 } else {
1443 let show_jump = match requested_by {
1444 PredictionRequestedBy::Buffer(requested_by_buffer_id) => {
1445 requested_by_buffer_id == &buffer.entity_id()
1446 }
1447 PredictionRequestedBy::DiagnosticsUpdate => true,
1448 };
1449
1450 if show_jump {
1451 Some(BufferEditPrediction::Jump { prediction })
1452 } else {
1453 None
1454 }
1455 }
1456 }
1457
    /// Consumes the current prediction for `project`: records its edits in the
    /// history as predicted+local, cancels in-flight predictions, and notifies
    /// the provider's backend of the acceptance.
    fn accept_current_prediction(&mut self, project: &Entity<Project>, cx: &mut Context<Self>) {
        let Some(current_prediction) = self
            .projects
            .get_mut(&project.entity_id())
            .and_then(|project_state| project_state.current_prediction.take())
        else {
            return;
        };

        // Record the accepted edits as predicted (`is_predicted = true`) and
        // local (`is_local = true`) so history coalescing handles them.
        self.report_changes_for_buffer(
            &current_prediction.prediction.buffer,
            project,
            true,
            true,
            cx,
        );

        // can't hold &mut project_state ref across report_changes_for_buffer_call
        let Some(project_state) = self.projects.get_mut(&project.entity_id()) else {
            return;
        };

        // Accepting supersedes everything still in flight.
        for pending_prediction in mem::take(&mut project_state.pending_predictions) {
            project_state.cancel_pending_prediction(pending_prediction, cx);
        }

        match self.edit_prediction_model {
            EditPredictionModel::Mercury => {
                mercury::edit_prediction_accepted(
                    current_prediction.prediction.id,
                    self.client.http_client(),
                    cx,
                );
            }
            EditPredictionModel::Zeta => {
                // Local providers (Ollama / OpenAI-compatible) don't report
                // acceptances to the Zed cloud.
                let is_cloud = !matches!(
                    all_language_settings(None, cx).edit_predictions.provider,
                    EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi
                );
                if is_cloud {
                    zeta::edit_prediction_accepted(self, current_prediction, cx)
                }
            }
            EditPredictionModel::Fim { .. } => {}
        }
    }
1504
    /// Background worker that batches rejected-prediction reports and flushes
    /// them to the `/predict_edits/reject` endpoint.
    ///
    /// Accumulates up to half the per-request maximum before flushing,
    /// debounced by `REJECT_REQUEST_DEBOUNCE`. On send failure the batch is
    /// retained and retried together with later rejections, capped at
    /// `MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST` per flush.
    async fn handle_rejected_predictions(
        rx: UnboundedReceiver<EditPredictionRejectionPayload>,
        client: Arc<Client>,
        llm_token: LlmApiToken,
        app_version: Version,
        background_executor: BackgroundExecutor,
    ) {
        let mut rx = std::pin::pin!(rx.peekable());
        let mut batched = Vec::new();

        while let Some(EditPredictionRejectionPayload {
            rejection,
            organization_id,
        }) = rx.next().await
        {
            batched.push(rejection);

            // Keep accumulating while more rejections arrive promptly, until
            // the debounce timer fires or the batch reaches half the cap.
            if batched.len() < MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST / 2 {
                select_biased! {
                    next = rx.as_mut().peek().fuse() => {
                        if next.is_some() {
                            continue;
                        }
                    }
                    () = background_executor.timer(REJECT_REQUEST_DEBOUNCE).fuse() => {},
                }
            }

            let url = client
                .http_client()
                .build_zed_llm_url("/predict_edits/reject", &[])
                .unwrap();

            // Flush only the most recent `flush_count` rejections.
            let flush_count = batched
                .len()
                // in case items have accumulated after failure
                .min(MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST);
            let start = batched.len() - flush_count;

            let body = RejectEditPredictionsBodyRef {
                rejections: &batched[start..],
            };

            // NOTE(review): `organization_id` comes from the payload that
            // triggered this flush; rejections batched from other payloads are
            // sent under it — confirm this is intended.
            let result = Self::send_api_request::<()>(
                |builder| {
                    let req = builder
                        .uri(url.as_ref())
                        .body(serde_json::to_string(&body)?.into());
                    anyhow::Ok(req?)
                },
                client.clone(),
                llm_token.clone(),
                organization_id,
                app_version.clone(),
                true,
            )
            .await;

            // Drop the flushed suffix only on success; on failure it is kept
            // and retried with subsequent rejections.
            if result.log_err().is_some() {
                batched.drain(start..);
            }
        }
    }
1568
    /// Background worker that emits "settled" telemetry for predictions once
    /// their edited region has been quiescent.
    ///
    /// Receives enqueue timestamps on `rx`. A pending prediction settles after
    /// `EDIT_PREDICTION_SETTLED_QUIESCENCE` without edits to its region; it is
    /// dropped without telemetry once `EDIT_PREDICTION_SETTLED_TTL` has passed
    /// since it was enqueued.
    async fn run_settled_predictions_worker(
        this: WeakEntity<Self>,
        mut rx: UnboundedReceiver<Instant>,
        cx: &mut AsyncApp,
    ) {
        let mut next_wake_time: Option<Instant> = None;
        loop {
            let now = cx.background_executor().now();
            if let Some(wake_time) = next_wake_time.take() {
                // Sleep until the earliest prediction could have settled.
                cx.background_executor()
                    .timer(wake_time.duration_since(now))
                    .await;
            } else {
                // Idle: wait for the next enqueue, then drain (coalesce) any
                // further notifications that are immediately available.
                let Some(new_enqueue_time) = rx.next().await else {
                    break;
                };
                next_wake_time = Some(new_enqueue_time + EDIT_PREDICTION_SETTLED_QUIESCENCE);
                while rx.next().now_or_never().flatten().is_some() {}
                continue;
            }

            let Some(this) = this.upgrade() else {
                break;
            };

            let now = cx.background_executor().now();

            // Earliest `last_edit_at` among predictions still pending; drives
            // the next wake-up time.
            let mut oldest_edited_at = None;

            this.update(cx, |this, _| {
                for (_, project_state) in this.projects.iter_mut() {
                    for (_, registered_buffer) in project_state.registered_buffers.iter_mut() {
                        registered_buffer
                            .pending_predictions
                            .retain_mut(|pending_prediction| {
                                // Expire predictions that never settle.
                                let age =
                                    now.saturating_duration_since(pending_prediction.enqueued_at);
                                if age >= EDIT_PREDICTION_SETTLED_TTL {
                                    return false;
                                }

                                let quiet_for =
                                    now.saturating_duration_since(pending_prediction.last_edit_at);
                                if quiet_for >= EDIT_PREDICTION_SETTLED_QUIESCENCE {
                                    // Settled: report the final text of the
                                    // editable region and drop the entry.
                                    let settled_editable_region = registered_buffer
                                        .snapshot
                                        .text_for_range(
                                            pending_prediction.editable_anchor_range.clone(),
                                        )
                                        .collect::<String>();

                                    #[cfg(test)]
                                    if let Some(callback) = &this.settled_event_callback {
                                        callback(
                                            pending_prediction.request_id.clone(),
                                            settled_editable_region.clone(),
                                        );
                                    }

                                    telemetry::event!(
                                        EDIT_PREDICTION_SETTLED_EVENT,
                                        request_id = pending_prediction.request_id.0.clone(),
                                        settled_editable_region,
                                        example = pending_prediction.example.take(),
                                        e2e_latency = pending_prediction.e2e_latency.as_millis(),
                                    );

                                    return false;
                                }

                                if oldest_edited_at
                                    .is_none_or(|t| pending_prediction.last_edit_at < t)
                                {
                                    oldest_edited_at = Some(pending_prediction.last_edit_at);
                                }

                                true
                            });
                    }
                }
            });

            next_wake_time = oldest_edited_at.map(|t| t + EDIT_PREDICTION_SETTLED_QUIESCENCE);
        }
    }
1654
1655 pub(crate) fn enqueue_settled_prediction(
1656 &mut self,
1657 request_id: EditPredictionId,
1658 project: &Entity<Project>,
1659 edited_buffer: &Entity<Buffer>,
1660 edited_buffer_snapshot: &BufferSnapshot,
1661 editable_offset_range: Range<usize>,
1662 example: Option<ExampleSpec>,
1663 e2e_latency: std::time::Duration,
1664 cx: &mut Context<Self>,
1665 ) {
1666 let this = &mut *self;
1667 let project_state = this.get_or_init_project(project, cx);
1668 if let Some(buffer) = project_state
1669 .registered_buffers
1670 .get_mut(&edited_buffer.entity_id())
1671 {
1672 let now = cx.background_executor().now();
1673 buffer.pending_predictions.push(PendingSettledPrediction {
1674 request_id: request_id,
1675 editable_anchor_range: edited_buffer_snapshot
1676 .anchor_range_around(editable_offset_range),
1677 example,
1678 e2e_latency,
1679 enqueued_at: now,
1680 last_edit_at: now,
1681 });
1682 this.settled_predictions_tx.unbounded_send(now).ok();
1683 }
1684 }
1685
1686 fn reject_current_prediction(
1687 &mut self,
1688 reason: EditPredictionRejectReason,
1689 project: &Entity<Project>,
1690 cx: &App,
1691 ) {
1692 if let Some(project_state) = self.projects.get_mut(&project.entity_id()) {
1693 project_state.pending_predictions.clear();
1694 if let Some(prediction) = project_state.current_prediction.take() {
1695 let model_version = prediction.prediction.model_version.clone();
1696 self.reject_prediction(
1697 prediction.prediction.id,
1698 reason,
1699 prediction.was_shown,
1700 model_version,
1701 Some(prediction.e2e_latency),
1702 cx,
1703 );
1704 }
1705 };
1706 }
1707
1708 fn did_show_current_prediction(
1709 &mut self,
1710 project: &Entity<Project>,
1711 display_type: edit_prediction_types::SuggestionDisplayType,
1712 _cx: &mut Context<Self>,
1713 ) {
1714 let Some(project_state) = self.projects.get_mut(&project.entity_id()) else {
1715 return;
1716 };
1717
1718 let Some(current_prediction) = project_state.current_prediction.as_mut() else {
1719 return;
1720 };
1721
1722 let is_jump = display_type == edit_prediction_types::SuggestionDisplayType::Jump;
1723 let previous_shown_with = current_prediction.shown_with;
1724
1725 if previous_shown_with.is_none() || !is_jump {
1726 current_prediction.shown_with = Some(display_type);
1727 }
1728
1729 let is_first_non_jump_show = !current_prediction.was_shown && !is_jump;
1730
1731 if is_first_non_jump_show {
1732 current_prediction.was_shown = true;
1733 }
1734
1735 if is_first_non_jump_show {
1736 self.shown_predictions
1737 .push_front(current_prediction.prediction.clone());
1738 if self.shown_predictions.len() > 50 {
1739 let completion = self.shown_predictions.pop_back().unwrap();
1740 self.rated_predictions.remove(&completion.id);
1741 }
1742 }
1743 }
1744
    /// Reports a rejected prediction to the active provider's backend.
    ///
    /// Cloud Zeta rejections are queued on `reject_predictions_tx` and batched
    /// by `handle_rejected_predictions`; local providers (Ollama /
    /// OpenAI-compatible) report nothing. Mercury reports immediately over
    /// HTTP; FIM providers have no rejection reporting.
    fn reject_prediction(
        &mut self,
        prediction_id: EditPredictionId,
        reason: EditPredictionRejectReason,
        was_shown: bool,
        model_version: Option<String>,
        e2e_latency: Option<std::time::Duration>,
        cx: &App,
    ) {
        match self.edit_prediction_model {
            EditPredictionModel::Zeta => {
                let is_cloud = !matches!(
                    all_language_settings(None, cx).edit_predictions.provider,
                    EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi
                );

                if is_cloud {
                    let organization_id = self
                        .user_store
                        .read(cx)
                        .current_organization()
                        .map(|organization| organization.id.clone());

                    // Queue for batched delivery; failure to enqueue is only logged.
                    self.reject_predictions_tx
                        .unbounded_send(EditPredictionRejectionPayload {
                            rejection: EditPredictionRejection {
                                request_id: prediction_id.to_string(),
                                reason,
                                was_shown,
                                model_version,
                                e2e_latency_ms: e2e_latency.map(|latency| latency.as_millis()),
                            },
                            organization_id,
                        })
                        .log_err();
                }
            }
            EditPredictionModel::Mercury => {
                mercury::edit_prediction_rejected(
                    prediction_id,
                    was_shown,
                    reason,
                    self.client.http_client(),
                    cx,
                );
            }
            EditPredictionModel::Fim { .. } => {}
        }
    }
1794
1795 fn is_refreshing(&self, project: &Entity<Project>) -> bool {
1796 self.projects
1797 .get(&project.entity_id())
1798 .is_some_and(|project_state| !project_state.pending_predictions.is_empty())
1799 }
1800
    /// Queues a (throttled) edit-prediction request anchored at `position` in
    /// `buffer`; throttling is keyed on the buffer's entity id.
    pub fn refresh_prediction_from_buffer(
        &mut self,
        project: Entity<Project>,
        buffer: Entity<Buffer>,
        position: language::Anchor,
        cx: &mut Context<Self>,
    ) {
        self.queue_prediction_refresh(
            project.clone(),
            PredictEditsRequestTrigger::Other,
            buffer.entity_id(),
            cx,
            move |this, cx| {
                let Some(request_task) = this
                    .update(cx, |this, cx| {
                        this.request_prediction(
                            &project,
                            &buffer,
                            position,
                            PredictEditsRequestTrigger::Other,
                            cx,
                        )
                    })
                    .log_err()
                else {
                    // Store was dropped before the refresh ran.
                    return Task::ready(anyhow::Ok(None));
                };

                cx.spawn(async move |_cx| {
                    // Tag the result with the requesting buffer so jumps are
                    // only shown from it.
                    request_task.await.map(|prediction_result| {
                        prediction_result.map(|prediction_result| {
                            (
                                prediction_result,
                                PredictionRequestedBy::Buffer(buffer.entity_id()),
                            )
                        })
                    })
                })
            },
        )
    }
1842
    /// Queues a prediction that jumps to the next relevant diagnostic,
    /// triggered by diagnostics updates rather than user edits.
    ///
    /// Skipped when the active provider isn't served by this store, when the
    /// user is following a collaborator, or when a current prediction already
    /// exists (buffer-triggered predictions take precedence).
    pub fn refresh_prediction_from_diagnostics(
        &mut self,
        project: Entity<Project>,
        scope: DiagnosticSearchScope,
        cx: &mut Context<Self>,
    ) {
        if !is_ep_store_provider(all_language_settings(None, cx).edit_predictions.provider) {
            return;
        }

        if currently_following(&project, cx) {
            return;
        }

        let Some(project_state) = self.projects.get_mut(&project.entity_id()) else {
            return;
        };

        // Prefer predictions from buffer
        if project_state.current_prediction.is_some() {
            log::debug!(
                "edit_prediction: diagnostic refresh skipped, current prediction already exists"
            );
            return;
        }

        self.queue_prediction_refresh(
            project.clone(),
            PredictEditsRequestTrigger::Diagnostics,
            project.entity_id(),
            cx,
            move |this, cx| {
                // Snapshot the active buffer and cursor, bailing if predictions
                // are disabled at that location.
                let Some((active_buffer, snapshot, cursor_point)) = this
                    .read_with(cx, |this, cx| {
                        let project_state = this.projects.get(&project.entity_id())?;
                        let (buffer, position) = project_state.active_buffer(&project, cx)?;
                        let snapshot = buffer.read(cx).snapshot();

                        if !Self::predictions_enabled_at(&snapshot, position, cx) {
                            return None;
                        }

                        let cursor_point = position
                            .map(|pos| pos.to_point(&snapshot))
                            .unwrap_or_default();

                        Some((buffer, snapshot, cursor_point))
                    })
                    .log_err()
                    .flatten()
                else {
                    return Task::ready(anyhow::Ok(None));
                };

                cx.spawn(async move |cx| {
                    // Local scope searches a window of lines around the cursor;
                    // Global uses the default (unbounded) range.
                    let diagnostic_search_range = match scope {
                        DiagnosticSearchScope::Local => {
                            let diagnostic_search_start =
                                cursor_point.row.saturating_sub(DIAGNOSTIC_LINES_RANGE);
                            let diagnostic_search_end = cursor_point.row + DIAGNOSTIC_LINES_RANGE;
                            Point::new(diagnostic_search_start, 0)
                                ..Point::new(diagnostic_search_end, 0)
                        }
                        DiagnosticSearchScope::Global => Default::default(),
                    };

                    let Some((jump_buffer, jump_position)) = Self::next_diagnostic_location(
                        active_buffer,
                        &snapshot,
                        diagnostic_search_range,
                        cursor_point,
                        &project,
                        cx,
                    )
                    .await?
                    else {
                        return anyhow::Ok(None);
                    };

                    let Some(prediction_result) = this
                        .update(cx, |this, cx| {
                            this.request_prediction(
                                &project,
                                &jump_buffer,
                                jump_position,
                                PredictEditsRequestTrigger::Diagnostics,
                                cx,
                            )
                        })?
                        .await?
                    else {
                        return anyhow::Ok(None);
                    };

                    this.update(cx, |this, cx| {
                        Some((
                            // If a buffer-triggered prediction appeared while we
                            // were waiting, reject this one in its favor.
                            if this
                                .get_or_init_project(&project, cx)
                                .current_prediction
                                .is_none()
                            {
                                prediction_result
                            } else {
                                EditPredictionResult {
                                    id: prediction_result.id,
                                    prediction: Err(EditPredictionRejectReason::CurrentPreferred),
                                    e2e_latency: prediction_result.e2e_latency,
                                }
                            },
                            PredictionRequestedBy::DiagnosticsUpdate,
                        ))
                    })
                })
            },
        );
    }
1959
1960 fn predictions_enabled_at(
1961 snapshot: &BufferSnapshot,
1962 position: Option<language::Anchor>,
1963 cx: &App,
1964 ) -> bool {
1965 let file = snapshot.file();
1966 let all_settings = all_language_settings(file, cx);
1967 if !all_settings.show_edit_predictions(snapshot.language(), cx)
1968 || file.is_some_and(|file| !all_settings.edit_predictions_enabled_for_file(file, cx))
1969 {
1970 return false;
1971 }
1972
1973 if let Some(last_position) = position {
1974 let settings = snapshot.settings_at(last_position, cx);
1975
1976 if !settings.edit_predictions_disabled_in.is_empty()
1977 && let Some(scope) = snapshot.language_scope_at(last_position)
1978 && let Some(scope_name) = scope.override_name()
1979 && settings
1980 .edit_predictions_disabled_in
1981 .iter()
1982 .any(|s| s == scope_name)
1983 {
1984 return false;
1985 }
1986 }
1987
1988 true
1989 }
1990
    /// Minimum spacing between prediction refreshes triggered by the same
    /// entity (see `queue_prediction_refresh`).
    pub const THROTTLE_TIMEOUT: Duration = Duration::from_millis(300);
1992}
1993
1994fn currently_following(project: &Entity<Project>, cx: &App) -> bool {
1995 let Some(app_state) = AppState::try_global(cx).and_then(|app_state| app_state.upgrade()) else {
1996 return false;
1997 };
1998
1999 app_state
2000 .workspace_store
2001 .read(cx)
2002 .workspaces()
2003 .filter_map(|workspace| workspace.upgrade())
2004 .any(|workspace| {
2005 workspace.read(cx).project().entity_id() == project.entity_id()
2006 && workspace
2007 .read(cx)
2008 .leader_for_pane(workspace.read(cx).active_pane())
2009 .is_some()
2010 })
2011}
2012
2013fn is_ep_store_provider(provider: EditPredictionProvider) -> bool {
2014 match provider {
2015 EditPredictionProvider::Zed
2016 | EditPredictionProvider::Mercury
2017 | EditPredictionProvider::Ollama
2018 | EditPredictionProvider::OpenAiCompatibleApi
2019 | EditPredictionProvider::Experimental(_) => true,
2020 EditPredictionProvider::None
2021 | EditPredictionProvider::Copilot
2022 | EditPredictionProvider::Codestral => false,
2023 }
2024}
2025
2026impl EditPredictionStore {
2027 fn queue_prediction_refresh(
2028 &mut self,
2029 project: Entity<Project>,
2030 request_trigger: PredictEditsRequestTrigger,
2031 throttle_entity: EntityId,
2032 cx: &mut Context<Self>,
2033 do_refresh: impl FnOnce(
2034 WeakEntity<Self>,
2035 &mut AsyncApp,
2036 )
2037 -> Task<Result<Option<(EditPredictionResult, PredictionRequestedBy)>>>
2038 + 'static,
2039 ) {
2040 fn select_throttle(
2041 project_state: &mut ProjectState,
2042 request_trigger: PredictEditsRequestTrigger,
2043 ) -> &mut Option<(EntityId, Instant)> {
2044 match request_trigger {
2045 PredictEditsRequestTrigger::Diagnostics => {
2046 &mut project_state.last_jump_prediction_refresh
2047 }
2048 _ => &mut project_state.last_edit_prediction_refresh,
2049 }
2050 }
2051
2052 let (needs_acceptance_tracking, max_pending_predictions) =
2053 match all_language_settings(None, cx).edit_predictions.provider {
2054 EditPredictionProvider::Zed
2055 | EditPredictionProvider::Mercury
2056 | EditPredictionProvider::Experimental(_) => (true, 2),
2057 EditPredictionProvider::Ollama => (false, 1),
2058 EditPredictionProvider::OpenAiCompatibleApi => (false, 2),
2059 EditPredictionProvider::None
2060 | EditPredictionProvider::Copilot
2061 | EditPredictionProvider::Codestral => {
2062 log::error!("queue_prediction_refresh called with non-store provider");
2063 return;
2064 }
2065 };
2066
2067 let drop_on_cancel = !needs_acceptance_tracking;
2068 let throttle_timeout = Self::THROTTLE_TIMEOUT;
2069 let project_state = self.get_or_init_project(&project, cx);
2070 let pending_prediction_id = project_state.next_pending_prediction_id;
2071 project_state.next_pending_prediction_id += 1;
2072 let throttle_at_enqueue = *select_throttle(project_state, request_trigger);
2073
2074 let task = cx.spawn(async move |this, cx| {
2075 let throttle_wait = this
2076 .update(cx, |this, cx| {
2077 let project_state = this.get_or_init_project(&project, cx);
2078 let throttle = *select_throttle(project_state, request_trigger);
2079
2080 let now = cx.background_executor().now();
2081 throttle.and_then(|(last_entity, last_timestamp)| {
2082 if throttle_entity != last_entity {
2083 return None;
2084 }
2085 (last_timestamp + throttle_timeout).checked_duration_since(now)
2086 })
2087 })
2088 .ok()
2089 .flatten();
2090
2091 if let Some(timeout) = throttle_wait {
2092 cx.background_executor().timer(timeout).await;
2093 }
2094
2095 // If this task was cancelled before the throttle timeout expired,
2096 // do not perform a request. Also skip if another task already
2097 // proceeded since we were enqueued (duplicate).
2098 let mut is_cancelled = true;
2099 this.update(cx, |this, cx| {
2100 let project_state = this.get_or_init_project(&project, cx);
2101 let was_cancelled = project_state
2102 .cancelled_predictions
2103 .remove(&pending_prediction_id);
2104 if was_cancelled {
2105 return;
2106 }
2107
2108 // Another request has been already sent since this was enqueued
2109 if *select_throttle(project_state, request_trigger) != throttle_at_enqueue {
2110 return;
2111 }
2112
2113 let new_refresh = (throttle_entity, cx.background_executor().now());
2114 *select_throttle(project_state, request_trigger) = Some(new_refresh);
2115 is_cancelled = false;
2116 })
2117 .ok();
2118 if is_cancelled {
2119 return None;
2120 }
2121
2122 let new_prediction_result = do_refresh(this.clone(), cx).await.log_err().flatten();
2123 let new_prediction_id = new_prediction_result
2124 .as_ref()
2125 .map(|(prediction, _)| prediction.id.clone());
2126
2127 // When a prediction completes, remove it from the pending list, and cancel
2128 // any pending predictions that were enqueued before it.
2129 this.update(cx, |this, cx| {
2130 let project_state = this.get_or_init_project(&project, cx);
2131
2132 let is_cancelled = project_state
2133 .cancelled_predictions
2134 .remove(&pending_prediction_id);
2135
2136 let new_current_prediction = if !is_cancelled
2137 && let Some((prediction_result, requested_by)) = new_prediction_result
2138 {
2139 match prediction_result.prediction {
2140 Ok(prediction) => {
2141 let new_prediction = CurrentEditPrediction {
2142 requested_by,
2143 prediction,
2144 was_shown: false,
2145 shown_with: None,
2146 e2e_latency: prediction_result.e2e_latency,
2147 };
2148
2149 if let Some(current_prediction) =
2150 project_state.current_prediction.as_ref()
2151 {
2152 if new_prediction.should_replace_prediction(¤t_prediction, cx)
2153 {
2154 this.reject_current_prediction(
2155 EditPredictionRejectReason::Replaced,
2156 &project,
2157 cx,
2158 );
2159
2160 Some(new_prediction)
2161 } else {
2162 this.reject_prediction(
2163 new_prediction.prediction.id,
2164 EditPredictionRejectReason::CurrentPreferred,
2165 false,
2166 new_prediction.prediction.model_version,
2167 Some(new_prediction.e2e_latency),
2168 cx,
2169 );
2170 None
2171 }
2172 } else {
2173 Some(new_prediction)
2174 }
2175 }
2176 Err(reject_reason) => {
2177 this.reject_prediction(
2178 prediction_result.id,
2179 reject_reason,
2180 false,
2181 None,
2182 Some(prediction_result.e2e_latency),
2183 cx,
2184 );
2185 None
2186 }
2187 }
2188 } else {
2189 None
2190 };
2191
2192 let project_state = this.get_or_init_project(&project, cx);
2193
2194 if let Some(new_prediction) = new_current_prediction {
2195 project_state.current_prediction = Some(new_prediction);
2196 }
2197
2198 let mut pending_predictions = mem::take(&mut project_state.pending_predictions);
2199 for (ix, pending_prediction) in pending_predictions.iter().enumerate() {
2200 if pending_prediction.id == pending_prediction_id {
2201 pending_predictions.remove(ix);
2202 for pending_prediction in pending_predictions.drain(0..ix) {
2203 project_state.cancel_pending_prediction(pending_prediction, cx)
2204 }
2205 break;
2206 }
2207 }
2208 this.get_or_init_project(&project, cx).pending_predictions = pending_predictions;
2209 cx.notify();
2210 })
2211 .ok();
2212
2213 new_prediction_id
2214 });
2215
2216 if project_state.pending_predictions.len() < max_pending_predictions {
2217 project_state
2218 .pending_predictions
2219 .push(PendingPrediction {
2220 id: pending_prediction_id,
2221 task,
2222 drop_on_cancel,
2223 })
2224 .unwrap();
2225 } else {
2226 let pending_prediction = project_state.pending_predictions.pop().unwrap();
2227 project_state
2228 .pending_predictions
2229 .push(PendingPrediction {
2230 id: pending_prediction_id,
2231 task,
2232 drop_on_cancel,
2233 })
2234 .unwrap();
2235 project_state.cancel_pending_prediction(pending_prediction, cx);
2236 }
2237 }
2238
2239 pub fn request_prediction(
2240 &mut self,
2241 project: &Entity<Project>,
2242 active_buffer: &Entity<Buffer>,
2243 position: language::Anchor,
2244 trigger: PredictEditsRequestTrigger,
2245 cx: &mut Context<Self>,
2246 ) -> Task<Result<Option<EditPredictionResult>>> {
2247 self.request_prediction_internal(
2248 project.clone(),
2249 active_buffer.clone(),
2250 position,
2251 trigger,
2252 cx.has_flag::<EditPredictionJumpsFeatureFlag>(),
2253 cx,
2254 )
2255 }
2256
2257 fn request_prediction_internal(
2258 &mut self,
2259 project: Entity<Project>,
2260 active_buffer: Entity<Buffer>,
2261 position: language::Anchor,
2262 trigger: PredictEditsRequestTrigger,
2263 allow_jump: bool,
2264 cx: &mut Context<Self>,
2265 ) -> Task<Result<Option<EditPredictionResult>>> {
2266 self.get_or_init_project(&project, cx);
2267 let project_state = self.projects.get(&project.entity_id()).unwrap();
2268 let stored_events = project_state.events(cx);
2269 let has_events = !stored_events.is_empty();
2270 let events: Vec<Arc<zeta_prompt::Event>> =
2271 stored_events.iter().map(|e| e.event.clone()).collect();
2272 let debug_tx = project_state.debug_tx.clone();
2273
2274 let snapshot = active_buffer.read(cx).snapshot();
2275 let cursor_point = position.to_point(&snapshot);
2276 let diagnostic_search_start = cursor_point.row.saturating_sub(DIAGNOSTIC_LINES_RANGE);
2277 let diagnostic_search_end = cursor_point.row + DIAGNOSTIC_LINES_RANGE;
2278 let diagnostic_search_range =
2279 Point::new(diagnostic_search_start, 0)..Point::new(diagnostic_search_end, 0);
2280
2281 let related_files = self.context_for_project(&project, cx);
2282
2283 let is_open_source = snapshot
2284 .file()
2285 .map_or(false, |file| self.is_file_open_source(&project, file, cx))
2286 && events.iter().all(|event| event.in_open_source_repo())
2287 && related_files.iter().all(|file| file.in_open_source_repo);
2288
2289 let can_collect_data = !cfg!(test)
2290 && is_open_source
2291 && self.is_data_collection_enabled(cx)
2292 && matches!(self.edit_prediction_model, EditPredictionModel::Zeta);
2293
2294 let inputs = EditPredictionModelInput {
2295 project: project.clone(),
2296 buffer: active_buffer,
2297 snapshot,
2298 position,
2299 events,
2300 related_files,
2301 trigger,
2302 diagnostic_search_range: diagnostic_search_range,
2303 debug_tx,
2304 can_collect_data,
2305 is_open_source,
2306 };
2307
2308 let capture_data = (can_collect_data && rand::random_ratio(1, 1000)).then(|| stored_events);
2309
2310 let task = match self.edit_prediction_model {
2311 EditPredictionModel::Zeta => {
2312 zeta::request_prediction_with_zeta(self, inputs, capture_data, cx)
2313 }
2314 EditPredictionModel::Fim { format } => fim::request_prediction(inputs, format, cx),
2315 EditPredictionModel::Mercury => self.mercury.request_prediction(inputs, cx),
2316 };
2317
2318 cx.spawn(async move |this, cx| {
2319 let prediction = task.await?;
2320
2321 // Only fall back to diagnostics-based prediction if we got a
2322 // the model had nothing to suggest for the buffer
2323 if prediction.is_none()
2324 && allow_jump
2325 && has_events
2326 && !matches!(trigger, PredictEditsRequestTrigger::Diagnostics)
2327 {
2328 this.update(cx, |this, cx| {
2329 this.refresh_prediction_from_diagnostics(
2330 project,
2331 DiagnosticSearchScope::Local,
2332 cx,
2333 );
2334 })?;
2335 return anyhow::Ok(None);
2336 }
2337
2338 Ok(prediction)
2339 })
2340 }
2341
    /// Finds the next diagnostic to jump to: first within `active_buffer`
    /// (excluding the already-searched range around the cursor), then — if
    /// none qualifies — in other project buffers with diagnostics, preferring
    /// paths sharing the longest component prefix with the active buffer's.
    ///
    /// Returns the target buffer and an anchor at the diagnostic's start, or
    /// `None` when no suitable diagnostic exists.
    pub(crate) async fn next_diagnostic_location(
        active_buffer: Entity<Buffer>,
        active_buffer_snapshot: &BufferSnapshot,
        active_buffer_diagnostic_search_range: Range<Point>,
        active_buffer_cursor_point: Point,
        project: &Entity<Project>,
        cx: &mut AsyncApp,
    ) -> Result<Option<(Entity<Buffer>, language::Anchor)>> {
        // Rows where collaborators currently have selection heads in the
        // active buffer.
        let collaborator_cursor_rows: Vec<u32> = active_buffer_snapshot
            .selections_in_range(Anchor::MIN..Anchor::MAX, false)
            .flat_map(|(_, _, _, selections)| {
                selections.map(|s| s.head().to_point(active_buffer_snapshot).row)
            })
            .collect();

        let mut jump_location = active_buffer_snapshot
            .diagnostic_groups(None)
            .into_iter()
            .filter_map(|(_, group)| {
                let range = &group.entries[group.primary_ix]
                    .range
                    .to_point(&active_buffer_snapshot);
                // Skip diagnostics inside the range that was already searched.
                if range.overlaps(&active_buffer_diagnostic_search_range) {
                    return None;
                }
                // Skip diagnostics near a collaborator's cursor unless they
                // are also near the local cursor.
                let near_collaborator = collaborator_cursor_rows.iter().any(|&collab_row| {
                    range.start.row.abs_diff(collab_row) <= DIAGNOSTIC_LINES_RANGE
                });
                let near_local = active_buffer_cursor_point.row.abs_diff(range.start.row)
                    <= DIAGNOSTIC_LINES_RANGE;
                if near_collaborator && !near_local {
                    return None;
                }
                Some(range.start)
            })
            // Prefer the diagnostic whose row is closest to the cursor row.
            .min_by_key(|probe| probe.row.abs_diff(active_buffer_cursor_point.row))
            .map(|position| {
                (
                    active_buffer.clone(),
                    active_buffer_snapshot.anchor_before(position),
                )
            });

        if jump_location.is_none() {
            let active_buffer_path = active_buffer.read_with(cx, |buffer, cx| {
                let file = buffer.file()?;

                Some(ProjectPath {
                    worktree_id: file.worktree_id(cx),
                    path: file.path().clone(),
                })
            });

            // Rank other buffers with diagnostics by how many leading path
            // components they share with the active buffer's path.
            let mut candidates: Vec<(ProjectPath, usize)> = project.read_with(cx, |project, cx| {
                project
                    .diagnostic_summaries(false, cx)
                    .filter(|(path, _, _)| Some(path) != active_buffer_path.as_ref())
                    .map(|(path, _, _)| {
                        let shared_prefix = path
                            .path
                            .components()
                            .zip(
                                active_buffer_path
                                    .as_ref()
                                    .map(|p| p.path.components())
                                    .unwrap_or_default(),
                            )
                            .take_while(|(a, b)| a == b)
                            .count();
                        (path, shared_prefix)
                    })
                    .collect()
            });

            // Longest shared prefix first.
            candidates.sort_by(|a, b| b.1.cmp(&a.1));

            for (path, _) in candidates {
                let candidate_buffer = project
                    .update(cx, |project, cx| project.open_buffer(path, cx))
                    .await?;

                let (has_collaborators, diagnostic_position) =
                    candidate_buffer.read_with(cx, |buffer, _cx| {
                        let snapshot = buffer.snapshot();
                        let has_collaborators = snapshot
                            .selections_in_range(Anchor::MIN..Anchor::MAX, false)
                            .next()
                            .is_some();
                        // NOTE(review): picks the entry with the minimum
                        // severity value; assumes lower values are more
                        // severe (LSP-style) — confirm.
                        let position = buffer
                            .buffer_diagnostics(None)
                            .into_iter()
                            .min_by_key(|entry| entry.diagnostic.severity)
                            .map(|entry| entry.range.start);
                        (has_collaborators, position)
                    });

                // Don't jump into buffers where collaborators have selections.
                if has_collaborators {
                    continue;
                }

                if let Some(position) = diagnostic_position {
                    jump_location = Some((candidate_buffer, position));
                    break;
                }
            }
        }

        anyhow::Ok(jump_location)
    }
2451
2452 async fn send_raw_llm_request(
2453 request: RawCompletionRequest,
2454 client: Arc<Client>,
2455 custom_url: Option<Arc<Url>>,
2456 llm_token: LlmApiToken,
2457 organization_id: Option<OrganizationId>,
2458 app_version: Version,
2459 ) -> Result<(RawCompletionResponse, Option<EditPredictionUsage>)> {
2460 let url = if let Some(custom_url) = custom_url {
2461 custom_url.as_ref().clone()
2462 } else {
2463 client
2464 .http_client()
2465 .build_zed_llm_url("/predict_edits/raw", &[])?
2466 };
2467
2468 Self::send_api_request(
2469 |builder| {
2470 let req = builder
2471 .uri(url.as_ref())
2472 .body(serde_json::to_string(&request)?.into());
2473 Ok(req?)
2474 },
2475 client,
2476 llm_token,
2477 organization_id,
2478 app_version,
2479 true,
2480 )
2481 .await
2482 }
2483
2484 pub(crate) async fn send_v3_request(
2485 input: ZetaPromptInput,
2486 client: Arc<Client>,
2487 llm_token: LlmApiToken,
2488 organization_id: Option<OrganizationId>,
2489 app_version: Version,
2490 trigger: PredictEditsRequestTrigger,
2491 ) -> Result<(PredictEditsV3Response, Option<EditPredictionUsage>)> {
2492 let url = client
2493 .http_client()
2494 .build_zed_llm_url("/predict_edits/v3", &[])?;
2495
2496 let request = PredictEditsV3Request { input, trigger };
2497
2498 let json_bytes = serde_json::to_vec(&request)?;
2499 let compressed = zstd::encode_all(&json_bytes[..], 3)?;
2500
2501 Self::send_api_request(
2502 |builder| {
2503 let req = builder
2504 .uri(url.as_ref())
2505 .header("Content-Encoding", "zstd")
2506 .body(compressed.clone().into());
2507 Ok(req?)
2508 },
2509 client,
2510 llm_token,
2511 organization_id,
2512 app_version,
2513 true,
2514 )
2515 .await
2516 }
2517
    /// Sends a POST request produced by `build`, attaching auth and version
    /// headers, and deserializes the JSON response body into `Res`.
    ///
    /// * Fails with [`ZedUpdateRequiredError`] when the server's
    ///   minimum-required-version header exceeds `app_version`.
    /// * On a response signalling a stale LLM token, refreshes the token once
    ///   and retries; any other non-success status is an error.
    /// * When `require_auth` is false, a missing token is tolerated and the
    ///   request goes out without an Authorization header.
    async fn send_api_request<Res>(
        build: impl Fn(http_client::http::request::Builder) -> Result<http_client::Request<AsyncBody>>,
        client: Arc<Client>,
        llm_token: LlmApiToken,
        organization_id: Option<OrganizationId>,
        app_version: Version,
        require_auth: bool,
    ) -> Result<(Res, Option<EditPredictionUsage>)>
    where
        Res: DeserializeOwned,
    {
        let http_client = client.http_client();

        // Acquire a token up front; failure is fatal only when auth is required.
        let mut token = if require_auth {
            Some(llm_token.acquire(&client, organization_id.clone()).await?)
        } else {
            llm_token
                .acquire(&client, organization_id.clone())
                .await
                .ok()
        };
        // Guards against refreshing the token more than once.
        let mut did_retry = false;

        loop {
            let request_builder = http_client::Request::builder().method(Method::POST);

            let mut request_builder = request_builder
                .header("Content-Type", "application/json")
                .header(ZED_VERSION_HEADER_NAME, app_version.to_string());

            // Only add Authorization header if we have a token
            if let Some(ref token_value) = token {
                request_builder =
                    request_builder.header("Authorization", format!("Bearer {}", token_value));
            }

            let request = build(request_builder)?;

            let mut response = http_client.send(request).await?;

            // The server may demand a newer client via a response header.
            if let Some(minimum_required_version) = response
                .headers()
                .get(MINIMUM_REQUIRED_VERSION_HEADER_NAME)
                .and_then(|version| Version::from_str(version.to_str().ok()?).ok())
            {
                anyhow::ensure!(
                    app_version >= minimum_required_version,
                    ZedUpdateRequiredError {
                        minimum_version: minimum_required_version
                    }
                );
            }

            if response.status().is_success() {
                let usage = EditPredictionUsage::from_headers(response.headers()).ok();

                let mut body = Vec::new();
                response.body_mut().read_to_end(&mut body).await?;
                return Ok((serde_json::from_slice(&body)?, usage));
            } else if !did_retry && token.is_some() && response.needs_llm_token_refresh() {
                // Stale token: refresh once, then loop to retry the request.
                did_retry = true;
                token = Some(llm_token.refresh(&client, organization_id.clone()).await?);
            } else {
                let mut body = String::new();
                response.body_mut().read_to_string(&mut body).await?;
                anyhow::bail!(
                    "Request failed with status: {:?}\nBody: {}",
                    response.status(),
                    body
                );
            }
        }
    }
2591
2592 pub fn refresh_context(
2593 &mut self,
2594 project: &Entity<Project>,
2595 buffer: &Entity<language::Buffer>,
2596 cursor_position: language::Anchor,
2597 cx: &mut Context<Self>,
2598 ) {
2599 self.get_or_init_project(project, cx)
2600 .context
2601 .update(cx, |store, cx| {
2602 store.refresh(buffer.clone(), cursor_position, cx);
2603 });
2604 }
2605
2606 #[cfg(feature = "cli-support")]
2607 pub fn set_context_for_buffer(
2608 &mut self,
2609 project: &Entity<Project>,
2610 related_files: Vec<RelatedFile>,
2611 cx: &mut Context<Self>,
2612 ) {
2613 self.get_or_init_project(project, cx)
2614 .context
2615 .update(cx, |store, cx| {
2616 store.set_related_files(related_files, cx);
2617 });
2618 }
2619
2620 #[cfg(feature = "cli-support")]
2621 pub fn set_recent_paths_for_project(
2622 &mut self,
2623 project: &Entity<Project>,
2624 paths: impl IntoIterator<Item = project::ProjectPath>,
2625 cx: &mut Context<Self>,
2626 ) {
2627 let project_state = self.get_or_init_project(project, cx);
2628 project_state.recent_paths = paths.into_iter().collect();
2629 }
2630
2631 fn is_file_open_source(
2632 &self,
2633 project: &Entity<Project>,
2634 file: &Arc<dyn File>,
2635 cx: &App,
2636 ) -> bool {
2637 if !file.is_local() || file.is_private() {
2638 return false;
2639 }
2640 let Some(project_state) = self.projects.get(&project.entity_id()) else {
2641 return false;
2642 };
2643 project_state
2644 .license_detection_watchers
2645 .get(&file.worktree_id(cx))
2646 .as_ref()
2647 .is_some_and(|watcher| watcher.is_project_open_source())
2648 }
2649
2650 pub(crate) fn is_data_collection_enabled(&self, cx: &App) -> bool {
2651 self.data_collection_choice.is_enabled(cx)
2652 }
2653
2654 fn load_data_collection_choice(cx: &App) -> DataCollectionChoice {
2655 let choice = KeyValueStore::global(cx)
2656 .read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE)
2657 .log_err()
2658 .flatten();
2659
2660 match choice.as_deref() {
2661 Some("true") => DataCollectionChoice::Enabled,
2662 Some("false") => DataCollectionChoice::Disabled,
2663 Some(_) => {
2664 log::error!("unknown value in '{ZED_PREDICT_DATA_COLLECTION_CHOICE}'");
2665 DataCollectionChoice::NotAnswered
2666 }
2667 None => DataCollectionChoice::NotAnswered,
2668 }
2669 }
2670
2671 fn toggle_data_collection_choice(&mut self, cx: &mut Context<Self>) {
2672 self.data_collection_choice = self.data_collection_choice.toggle();
2673 let new_choice = self.data_collection_choice;
2674 let is_enabled = new_choice.is_enabled(cx);
2675 let kvp = KeyValueStore::global(cx);
2676 db::write_and_log(cx, move || async move {
2677 kvp.write_kvp(
2678 ZED_PREDICT_DATA_COLLECTION_CHOICE.into(),
2679 is_enabled.to_string(),
2680 )
2681 .await
2682 });
2683 }
2684
2685 pub fn shown_predictions(&self) -> impl DoubleEndedIterator<Item = &EditPrediction> {
2686 self.shown_predictions.iter()
2687 }
2688
2689 pub fn shown_completions_len(&self) -> usize {
2690 self.shown_predictions.len()
2691 }
2692
2693 pub fn is_prediction_rated(&self, id: &EditPredictionId) -> bool {
2694 self.rated_predictions.contains(id)
2695 }
2696
2697 pub fn rate_prediction(
2698 &mut self,
2699 prediction: &EditPrediction,
2700 rating: EditPredictionRating,
2701 feedback: String,
2702 cx: &mut Context<Self>,
2703 ) {
2704 let organization = self.user_store.read(cx).current_organization();
2705
2706 self.rated_predictions.insert(prediction.id.clone());
2707
2708 cx.background_spawn({
2709 let client = self.client.clone();
2710 let prediction_id = prediction.id.to_string();
2711 let inputs = serde_json::to_value(&prediction.inputs);
2712 let output = prediction
2713 .edit_preview
2714 .as_unified_diff(prediction.snapshot.file(), &prediction.edits);
2715 async move {
2716 client
2717 .cloud_client()
2718 .submit_edit_prediction_feedback(SubmitEditPredictionFeedbackBody {
2719 organization_id: organization.map(|organization| organization.id.clone()),
2720 request_id: prediction_id,
2721 rating: match rating {
2722 EditPredictionRating::Positive => "positive".to_string(),
2723 EditPredictionRating::Negative => "negative".to_string(),
2724 },
2725 inputs: inputs?,
2726 output,
2727 feedback,
2728 })
2729 .await?;
2730
2731 anyhow::Ok(())
2732 }
2733 })
2734 .detach_and_log_err(cx);
2735
2736 cx.notify();
2737 }
2738}
2739
2740fn collaborator_edit_overlaps_locality_region(
2741 project_state: &ProjectState,
2742 project: &Entity<Project>,
2743 buffer: &Entity<Buffer>,
2744 snapshot: &BufferSnapshot,
2745 edit_range: &Range<Anchor>,
2746 cx: &App,
2747) -> bool {
2748 let Some((active_buffer, Some(position))) = project_state.active_buffer(project, cx) else {
2749 return false;
2750 };
2751
2752 if active_buffer.entity_id() != buffer.entity_id() {
2753 return false;
2754 }
2755
2756 let locality_point_range = expand_context_syntactically_then_linewise(
2757 snapshot,
2758 (position..position).to_point(snapshot),
2759 COLLABORATOR_EDIT_LOCALITY_CONTEXT_TOKENS,
2760 );
2761 let locality_anchor_range = snapshot.anchor_range_around(locality_point_range);
2762
2763 edit_range.overlaps(&locality_anchor_range, snapshot)
2764}
2765
/// Collapses a run of trailing mergeable buffer-change events into a single
/// merged event, keeping the stored event history compact.
fn merge_trailing_events_if_needed(
    events: &mut VecDeque<StoredEvent>,
    end_snapshot: &TextBufferSnapshot,
    latest_snapshot: &TextBufferSnapshot,
    latest_edit_range: &Range<Anchor>,
) {
    // Bail out unless the newest stored event belongs to the same buffer as
    // `latest_snapshot` and the snapshot has observed all of its edits.
    if let Some(last_event) = events.back() {
        if last_event.old_snapshot.remote_id() != latest_snapshot.remote_id() {
            return;
        }
        if !latest_snapshot
            .version
            .observed_all(&last_event.new_snapshot_version)
        {
            return;
        }
    }

    // Walk backwards from the newest event, counting how many consecutive
    // trailing events can merge with their successor.
    let mut next_old_event = None;
    let mut mergeable_count = 0;
    for old_event in events.iter().rev() {
        if let Some(next_old_event) = next_old_event
            && !old_event.can_merge(next_old_event, latest_snapshot, latest_edit_range)
        {
            break;
        }
        mergeable_count += 1;
        next_old_event = Some(old_event);
    }

    // Nothing to do unless at least two events can be coalesced.
    if mergeable_count <= 1 {
        return;
    }

    // The merged event spans from the oldest mergeable event's snapshot to
    // `end_snapshot`, covering the union of all of their edit ranges.
    let mut events_to_merge = events.range(events.len() - mergeable_count..).peekable();
    let oldest_event = events_to_merge.peek().unwrap();
    let oldest_snapshot = oldest_event.old_snapshot.clone();
    let newest_snapshot = end_snapshot;
    let mut merged_edit_range = oldest_event.total_edit_range.clone();

    for event in events.range(events.len() - mergeable_count + 1..) {
        merged_edit_range =
            merge_anchor_ranges(&merged_edit_range, &event.total_edit_range, latest_snapshot);
    }

    // Replace the mergeable tail with one event whose diff covers the merged
    // range — but only if a diff could actually be computed.
    if let Some((diff, edit_range)) = compute_diff_between_snapshots_in_range(
        &oldest_snapshot,
        newest_snapshot,
        &merged_edit_range,
    ) {
        let merged_event = match oldest_event.event.as_ref() {
            zeta_prompt::Event::BufferChange {
                old_path,
                path,
                in_open_source_repo,
                ..
            } => StoredEvent {
                event: Arc::new(zeta_prompt::Event::BufferChange {
                    old_path: old_path.clone(),
                    path: path.clone(),
                    diff,
                    in_open_source_repo: *in_open_source_repo,
                    // The merged event counts as predicted only when every
                    // merged event was itself a predicted buffer change.
                    predicted: events_to_merge.all(|e| {
                        matches!(
                            e.event.as_ref(),
                            zeta_prompt::Event::BufferChange {
                                predicted: true,
                                ..
                            }
                        )
                    }),
                }),
                old_snapshot: oldest_snapshot.clone(),
                new_snapshot_version: newest_snapshot.version.clone(),
                total_edit_range: newest_snapshot.anchor_before(edit_range.start)
                    ..newest_snapshot.anchor_before(edit_range.end),
            },
        };
        events.truncate(events.len() - mergeable_count);
        events.push_back(merged_event);
    }
}
2848
2849fn merge_anchor_ranges(
2850 left: &Range<Anchor>,
2851 right: &Range<Anchor>,
2852 snapshot: &TextBufferSnapshot,
2853) -> Range<Anchor> {
2854 let start = if left.start.cmp(&right.start, snapshot).is_le() {
2855 left.start
2856 } else {
2857 right.start
2858 };
2859 let end = if left.end.cmp(&right.end, snapshot).is_ge() {
2860 left.end
2861 } else {
2862 right.end
2863 };
2864 start..end
2865}
2866
/// Error produced when the server reports a minimum required version newer
/// than the running app; its message tells the user to update Zed.
#[derive(Error, Debug)]
#[error(
    "You must update to Zed version {minimum_version} or higher to continue using edit predictions."
)]
pub struct ZedUpdateRequiredError {
    /// Lowest Zed version the server will accept.
    minimum_version: Version,
}
2874
/// The user's tri-state answer to the data-collection opt-in prompt.
#[derive(Debug, Clone, Copy)]
pub enum DataCollectionChoice {
    /// The user has not answered the prompt yet.
    NotAnswered,
    /// The user opted in.
    Enabled,
    /// The user opted out.
    Disabled,
}
2881
2882impl DataCollectionChoice {
2883 pub fn is_enabled(self, cx: &App) -> bool {
2884 if cx.is_staff() {
2885 return true;
2886 }
2887 match self {
2888 Self::Enabled => true,
2889 Self::NotAnswered | Self::Disabled => false,
2890 }
2891 }
2892
2893 #[must_use]
2894 pub fn toggle(&self) -> DataCollectionChoice {
2895 match self {
2896 Self::Enabled => Self::Disabled,
2897 Self::Disabled => Self::Enabled,
2898 Self::NotAnswered => Self::Enabled,
2899 }
2900 }
2901}
2902
2903impl From<bool> for DataCollectionChoice {
2904 fn from(value: bool) -> Self {
2905 match value {
2906 true => DataCollectionChoice::Enabled,
2907 false => DataCollectionChoice::Disabled,
2908 }
2909 }
2910}
2911
/// Marker type for the edit-prediction upsell; dismissal state is persisted
/// via the key-value store (see its `Dismissable` implementation).
struct ZedPredictUpsell;
2913
2914impl Dismissable for ZedPredictUpsell {
2915 const KEY: &'static str = "dismissed-edit-predict-upsell";
2916
2917 fn dismissed(cx: &App) -> bool {
2918 // To make this backwards compatible with older versions of Zed, we
2919 // check if the user has seen the previous Edit Prediction Onboarding
2920 // before, by checking the data collection choice which was written to
2921 // the database once the user clicked on "Accept and Enable"
2922 let kvp = KeyValueStore::global(cx);
2923 if kvp
2924 .read_kvp(ZED_PREDICT_DATA_COLLECTION_CHOICE)
2925 .log_err()
2926 .is_some_and(|s| s.is_some())
2927 {
2928 return true;
2929 }
2930
2931 kvp.read_kvp(Self::KEY)
2932 .log_err()
2933 .is_some_and(|s| s.is_some())
2934 }
2935}
2936
2937pub fn should_show_upsell_modal(cx: &App) -> bool {
2938 !ZedPredictUpsell::dismissed(cx)
2939}
2940
2941pub fn init(cx: &mut App) {
2942 cx.observe_new(move |workspace: &mut Workspace, _, _cx| {
2943 workspace.register_action(
2944 move |workspace, _: &zed_actions::OpenZedPredictOnboarding, window, cx| {
2945 ZedPredictModal::toggle(
2946 workspace,
2947 workspace.user_store().clone(),
2948 workspace.client().clone(),
2949 window,
2950 cx,
2951 )
2952 },
2953 );
2954
2955 workspace.register_action(|workspace, _: &ResetOnboarding, _window, cx| {
2956 update_settings_file(workspace.app_state().fs.clone(), cx, move |settings, _| {
2957 settings
2958 .project
2959 .all_languages
2960 .edit_predictions
2961 .get_or_insert_default()
2962 .provider = Some(EditPredictionProvider::None)
2963 });
2964 });
2965 fn copilot_for_project(project: &Entity<Project>, cx: &mut App) -> Option<Entity<Copilot>> {
2966 EditPredictionStore::try_global(cx).and_then(|store| {
2967 store.update(cx, |this, cx| this.start_copilot_for_project(project, cx))
2968 })
2969 }
2970
2971 workspace.register_action(|workspace, _: &SignIn, window, cx| {
2972 if let Some(copilot) = copilot_for_project(workspace.project(), cx) {
2973 copilot_ui::initiate_sign_in(copilot, window, cx);
2974 }
2975 });
2976 workspace.register_action(|workspace, _: &Reinstall, window, cx| {
2977 if let Some(copilot) = copilot_for_project(workspace.project(), cx) {
2978 copilot_ui::reinstall_and_sign_in(copilot, window, cx);
2979 }
2980 });
2981 workspace.register_action(|workspace, _: &SignOut, window, cx| {
2982 if let Some(copilot) = copilot_for_project(workspace.project(), cx) {
2983 copilot_ui::initiate_sign_out(copilot, window, cx);
2984 }
2985 });
2986 })
2987 .detach();
2988}