1use crate::cursor_excerpt::{compute_excerpt_ranges, excerpt_ranges_to_byte_offsets};
2use crate::prediction::EditPredictionResult;
3use crate::zeta1::compute_edits_and_cursor_position;
4use crate::{
5 CurrentEditPrediction, DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId,
6 EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore,
7};
8use anyhow::Result;
9use cloud_llm_client::predict_edits_v3::RawCompletionRequest;
10use cloud_llm_client::{AcceptEditPredictionBody, EditPredictionRejectReason};
11use gpui::{App, Task, prelude::*};
12use language::{OffsetRangeExt as _, ToOffset as _, ToPoint};
13use release_channel::AppVersion;
14
15use std::env;
16use std::{path::Path, sync::Arc, time::Instant};
17use zeta_prompt::{
18 CURSOR_MARKER, EditPredictionModelKind, ZetaFormat, clean_zeta2_model_output,
19 format_zeta_prompt, get_prefill, prompt_input_contains_special_tokens,
20};
21
/// Upper bound on context tokens for the cursor excerpt; presumably the source
/// of the `editable_350` window referenced in `zeta2_prompt_input` — TODO confirm
/// against `compute_excerpt_ranges`.
pub const MAX_CONTEXT_TOKENS: usize = 350;
23
24pub fn max_editable_tokens(format: ZetaFormat) -> usize {
25 match format {
26 ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered => 150,
27 ZetaFormat::V0114180EditableRegion => 180,
28 ZetaFormat::V0120GitMergeMarkers => 180,
29 ZetaFormat::V0131GitMergeMarkersPrefix => 180,
30 ZetaFormat::V0211Prefill => 180,
31 ZetaFormat::V0211SeedCoder => 180,
32 }
33}
34
/// Requests an edit prediction from the Zeta2 model for `position` in `buffer`.
///
/// Builds the prompt input from the cursor excerpt, related files, and recent
/// edit events, then sends it to one of two endpoints:
/// - a raw completion endpoint when a `zeta2_raw_config` override is present
///   (prompt formatted client-side, prefill prepended, output cleaned here), or
/// - the hosted V3 endpoint otherwise (server picks model/format and strips
///   the suffix).
///
/// Returns a task resolving to:
/// - `Ok(None)` when no request was made (prompt contained special tokens) or
///   `handle_api_response` yielded nothing;
/// - `Ok(Some(result))` with `prediction: Err(Empty)` when the model returned
///   no output;
/// - `Ok(Some(result))` with computed edits otherwise.
pub fn request_prediction_with_zeta2(
    store: &mut EditPredictionStore,
    EditPredictionModelInput {
        buffer,
        snapshot,
        position,
        related_files,
        events,
        debug_tx,
        trigger,
        project,
        ..
    }: EditPredictionModelInput,
    preferred_model: Option<EditPredictionModelKind>,
    cx: &mut Context<EditPredictionStore>,
) -> Task<Result<Option<EditPredictionResult>>> {
    // Captured now so latency can be measured from snapshot time, not send time.
    let buffer_snapshotted_at = Instant::now();
    let raw_config = store.zeta2_raw_config().cloned();

    // Buffers without a backing file get a placeholder path.
    let excerpt_path: Arc<Path> = snapshot
        .file()
        .map(|file| -> Arc<Path> { file.full_path(cx).into() })
        .unwrap_or_else(|| Arc::from(Path::new("untitled")));

    let client = store.client.clone();
    let llm_token = store.llm_token.clone();
    let app_version = AppVersion::global(cx);

    // Only treated as open source when the file, every event, and every related
    // file are all in open-source repos; an unbacked buffer counts as not open
    // source.
    let is_open_source = snapshot
        .file()
        .map_or(false, |file| store.is_file_open_source(&project, file, cx))
        && events.iter().all(|event| event.in_open_source_repo())
        && related_files.iter().all(|file| file.in_open_source_repo);

    // Everything below runs off the main thread; `cx` is not captured.
    let request_task = cx.background_spawn({
        async move {
            // Raw config (if any) dictates the prompt format; otherwise the
            // default format is used for the client-side checks below.
            let zeta_version = raw_config
                .as_ref()
                .map(|config| config.format)
                .unwrap_or(ZetaFormat::default());

            let cursor_offset = position.to_offset(&snapshot);
            let (editable_offset_range, prompt_input) = zeta2_prompt_input(
                &snapshot,
                related_files,
                events,
                excerpt_path,
                cursor_offset,
                zeta_version,
                preferred_model,
                is_open_source,
            );

            // Bail out entirely if the user's text contains the model's own
            // special tokens — sending it would confuse the model.
            if prompt_input_contains_special_tokens(&prompt_input, zeta_version) {
                return Ok((None, None));
            }

            // Debug channel is best-effort: send failures are ignored.
            if let Some(debug_tx) = &debug_tx {
                let prompt = format_zeta_prompt(&prompt_input, zeta_version);
                debug_tx
                    .unbounded_send(DebugEvent::EditPredictionStarted(
                        EditPredictionStartedDebugEvent {
                            buffer: buffer.downgrade(),
                            prompt: Some(prompt),
                            position,
                        },
                    ))
                    .ok();
            }

            log::trace!("Sending edit prediction request");

            let (request_id, output_text, usage) = if let Some(config) = &raw_config {
                // Raw path: format the prompt locally and append the prefill so
                // the model continues from it.
                let prompt = format_zeta_prompt(&prompt_input, config.format);
                let prefill = get_prefill(&prompt_input, config.format);
                let prompt = format!("{prompt}{prefill}");
                let request = RawCompletionRequest {
                    model: config.model_id.clone().unwrap_or_default(),
                    prompt,
                    temperature: None,
                    stop: vec![],
                    max_tokens: Some(2048),
                    environment: Some(config.format.to_string().to_lowercase()),
                };

                let (mut response, usage) = EditPredictionStore::send_raw_llm_request(
                    request,
                    client,
                    None,
                    llm_token,
                    app_version,
                )
                .await?;

                let request_id = EditPredictionId(response.id.clone().into());
                // Re-prepend the prefill (the model's completion starts after
                // it), then clean format-specific markers from the output.
                let output_text = response.choices.pop().map(|choice| {
                    let response = &choice.text;
                    let output = format!("{prefill}{response}");
                    clean_zeta2_model_output(&output, config.format).to_string()
                });

                (request_id, output_text, usage)
            } else {
                // Use V3 endpoint - server handles model/version selection and suffix stripping
                let (response, usage) = EditPredictionStore::send_v3_request(
                    prompt_input.clone(),
                    client,
                    llm_token,
                    app_version,
                    trigger,
                )
                .await?;

                let request_id = EditPredictionId(response.request_id.into());
                // Empty output is normalized to `None` so both paths agree.
                let output_text = if response.output.is_empty() {
                    None
                } else {
                    Some(response.output)
                };
                (request_id, output_text, usage)
            };

            let received_response_at = Instant::now();

            log::trace!("Got edit prediction response");

            // No output: report the request id (for telemetry/debug) but no
            // prediction payload.
            let Some(mut output_text) = output_text else {
                return Ok((Some((request_id, None)), usage));
            };

            // Client-side cursor marker processing (applies to both raw and v3 responses)
            // The marker's byte offset is remembered so the predicted cursor
            // position can be recovered after the marker is removed.
            let cursor_offset_in_output = output_text.find(CURSOR_MARKER);
            if let Some(offset) = cursor_offset_in_output {
                log::trace!("Stripping out {CURSOR_MARKER} from response at offset {offset}");
                output_text.replace_range(offset..offset + CURSOR_MARKER.len(), "");
            }

            if let Some(debug_tx) = &debug_tx {
                debug_tx
                    .unbounded_send(DebugEvent::EditPredictionFinished(
                        EditPredictionFinishedDebugEvent {
                            buffer: buffer.downgrade(),
                            position,
                            model_output: Some(output_text.clone()),
                        },
                    ))
                    .ok();
            }

            let mut old_text = snapshot
                .text_for_range(editable_offset_range.clone())
                .collect::<String>();

            // Normalize both sides to end with a newline so the diff below
            // doesn't see a spurious trailing-newline edit.
            if !output_text.is_empty() && !output_text.ends_with('\n') {
                output_text.push('\n');
            }
            if !old_text.is_empty() && !old_text.ends_with('\n') {
                old_text.push('\n');
            }

            let (edits, cursor_position) = compute_edits_and_cursor_position(
                old_text,
                &output_text,
                editable_offset_range.start,
                cursor_offset_in_output,
                &snapshot,
            );

            anyhow::Ok((
                Some((
                    request_id,
                    Some((
                        prompt_input,
                        buffer,
                        snapshot.clone(),
                        edits,
                        cursor_position,
                        received_response_at,
                    )),
                )),
                usage,
            ))
        }
    });

    // Back on the foreground: unpack the background task's nested result into
    // the public `EditPredictionResult` shape.
    cx.spawn(async move |this, cx| {
        let Some((id, prediction)) =
            EditPredictionStore::handle_api_response(&this, request_task.await, cx)?
        else {
            return Ok(None);
        };

        // `None` payload means the model produced no output for this request.
        let Some((
            inputs,
            edited_buffer,
            edited_buffer_snapshot,
            edits,
            cursor_position,
            received_response_at,
        )) = prediction
        else {
            return Ok(Some(EditPredictionResult {
                id,
                prediction: Err(EditPredictionRejectReason::Empty),
            }));
        };

        Ok(Some(
            EditPredictionResult::new(
                id,
                &edited_buffer,
                &edited_buffer_snapshot,
                edits.into(),
                cursor_position,
                buffer_snapshotted_at,
                received_response_at,
                inputs,
                cx,
            )
            .await,
        ))
    })
}
258
259pub fn zeta2_prompt_input(
260 snapshot: &language::BufferSnapshot,
261 related_files: Vec<zeta_prompt::RelatedFile>,
262 events: Vec<Arc<zeta_prompt::Event>>,
263 excerpt_path: Arc<Path>,
264 cursor_offset: usize,
265 zeta_format: ZetaFormat,
266 preferred_model: Option<EditPredictionModelKind>,
267 is_open_source: bool,
268) -> (std::ops::Range<usize>, zeta_prompt::ZetaPromptInput) {
269 let cursor_point = cursor_offset.to_point(snapshot);
270
271 let (full_context, range_points) = compute_excerpt_ranges(cursor_point, snapshot);
272
273 let related_files = crate::filter_redundant_excerpts(
274 related_files,
275 excerpt_path.as_ref(),
276 full_context.start.row..full_context.end.row,
277 );
278
279 let full_context_start_offset = full_context.start.to_offset(snapshot);
280 let full_context_start_row = full_context.start.row;
281
282 let excerpt_ranges =
283 excerpt_ranges_to_byte_offsets(&range_points, full_context_start_offset, snapshot);
284
285 let editable_range = match preferred_model {
286 Some(EditPredictionModelKind::Zeta1) => &range_points.editable_350,
287 _ => match zeta_format {
288 ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered => &range_points.editable_150,
289 _ => &range_points.editable_180,
290 },
291 };
292
293 let editable_offset_range = editable_range.to_offset(snapshot);
294 let cursor_offset_in_excerpt = cursor_offset - full_context_start_offset;
295 let editable_range_in_excerpt = (editable_offset_range.start - full_context_start_offset)
296 ..(editable_offset_range.end - full_context_start_offset);
297
298 let prompt_input = zeta_prompt::ZetaPromptInput {
299 cursor_path: excerpt_path,
300 cursor_excerpt: snapshot
301 .text_for_range(full_context)
302 .collect::<String>()
303 .into(),
304 editable_range_in_excerpt,
305 cursor_offset_in_excerpt,
306 excerpt_start_row: Some(full_context_start_row),
307 events,
308 related_files,
309 excerpt_ranges: Some(excerpt_ranges),
310 preferred_model,
311 in_open_source_repo: is_open_source,
312 };
313 (editable_offset_range, prompt_input)
314}
315
316pub(crate) fn edit_prediction_accepted(
317 store: &EditPredictionStore,
318 current_prediction: CurrentEditPrediction,
319 cx: &App,
320) {
321 let custom_accept_url = env::var("ZED_ACCEPT_PREDICTION_URL").ok();
322 if store.zeta2_raw_config().is_some() && custom_accept_url.is_none() {
323 return;
324 }
325
326 let request_id = current_prediction.prediction.id.to_string();
327 let require_auth = custom_accept_url.is_none();
328 let client = store.client.clone();
329 let llm_token = store.llm_token.clone();
330 let app_version = AppVersion::global(cx);
331
332 cx.background_spawn(async move {
333 let url = if let Some(accept_edits_url) = custom_accept_url {
334 gpui::http_client::Url::parse(&accept_edits_url)?
335 } else {
336 client
337 .http_client()
338 .build_zed_llm_url("/predict_edits/accept", &[])?
339 };
340
341 let response = EditPredictionStore::send_api_request::<()>(
342 move |builder| {
343 let req = builder.uri(url.as_ref()).body(
344 serde_json::to_string(&AcceptEditPredictionBody {
345 request_id: request_id.clone(),
346 })?
347 .into(),
348 );
349 Ok(req?)
350 },
351 client,
352 llm_token,
353 app_version,
354 require_auth,
355 )
356 .await;
357
358 response?;
359 anyhow::Ok(())
360 })
361 .detach_and_log_err(cx);
362}