1use crate::{
2 DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId, EditPredictionModelInput,
3 EditPredictionStartedDebugEvent, open_ai_response::text_from_response,
4 prediction::EditPredictionResult, zeta1::compute_edits,
5};
6use anyhow::{Context as _, Result};
7use futures::AsyncReadExt as _;
8use gpui::{
9 App, AppContext as _, Entity, Global, SharedString, Task,
10 http_client::{self, AsyncBody, Method},
11};
12use language::{OffsetRangeExt as _, ToOffset, ToPoint as _};
13use language_model::{ApiKeyState, EnvVar, env_var};
14use std::{mem, ops::Range, path::Path, sync::Arc, time::Instant};
15use zeta_prompt::ZetaPromptInput;
16
/// Mercury's OpenAI-compatible edit-completions endpoint.
const MERCURY_API_URL: &str = "https://api.inceptionlabs.ai/v1/edit/completions";
/// Token budget for the editable (rewrite) window around the cursor; passed to
/// `editable_and_context_ranges_for_cursor_position`.
const MAX_REWRITE_TOKENS: usize = 150;
/// Token budget for the surrounding context excerpt sent in the prompt.
const MAX_CONTEXT_TOKENS: usize = 350;
20
/// Client for the Inception Labs "Mercury" edit-prediction model.
pub struct Mercury {
    // Process-wide shared API key state (see `mercury_api_token`); loaded
    // lazily on the first prediction request.
    pub api_token: Entity<ApiKeyState>,
}
24
impl Mercury {
    /// Creates a `Mercury` client backed by the process-wide shared API key
    /// state entity.
    pub fn new(cx: &mut App) -> Self {
        Mercury {
            api_token: mercury_api_token(cx),
        }
    }

    /// Requests an edit prediction from the Mercury API for the given buffer
    /// position.
    ///
    /// Resolves to `Ok(None)` when no API token is available yet (a load is
    /// kicked off so a later call may succeed). Otherwise builds a prompt from
    /// the cursor excerpt, related-file excerpts, and recent edit events,
    /// POSTs it to Mercury, and converts the model's rewritten text into
    /// buffer edits via `compute_edits`.
    pub(crate) fn request_prediction(
        &self,
        EditPredictionModelInput {
            buffer,
            snapshot,
            position,
            events,
            related_files,
            debug_tx,
            ..
        }: EditPredictionModelInput,
        cx: &mut App,
    ) -> Task<Result<Option<EditPredictionResult>>> {
        // Start loading the API key (env var / credential store) if it hasn't
        // been loaded yet; the result of this call is intentionally ignored.
        self.api_token.update(cx, |key_state, cx| {
            _ = key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, cx);
        });
        // No token available yet: report "no prediction" rather than an error.
        let Some(api_token) = self.api_token.read(cx).key(&MERCURY_CREDENTIALS_URL) else {
            return Task::ready(Ok(None));
        };
        // Path of the buffer's file, or "untitled" for an unsaved buffer.
        let full_path: Arc<Path> = snapshot
            .file()
            .map(|file| file.full_path(cx))
            .unwrap_or_else(|| "untitled".into())
            .into();

        // Capture everything the background task needs before moving off the
        // main thread.
        let http_client = cx.http_client();
        let cursor_point = position.to_point(&snapshot);
        let buffer_snapshotted_at = Instant::now();
        let active_buffer = buffer.clone();

        // Heavy lifting (prompt construction, HTTP round trip, diffing) runs
        // on the background executor.
        let result = cx.background_spawn(async move {
            // Compute the rewritable window and the larger context window
            // around the cursor, bounded by the token budgets above.
            let (editable_range, context_range) =
                crate::cursor_excerpt::editable_and_context_ranges_for_cursor_position(
                    cursor_point,
                    &snapshot,
                    MAX_CONTEXT_TOKENS,
                    MAX_REWRITE_TOKENS,
                );

            // Drop related-file excerpts made redundant by the context window
            // we're already sending for the current file.
            let related_files = crate::filter_redundant_excerpts(
                related_files,
                full_path.as_ref(),
                context_range.start.row..context_range.end.row,
            );

            let context_offset_range = context_range.to_offset(&snapshot);
            let context_start_row = context_range.start.row;

            let editable_offset_range = editable_range.to_offset(&snapshot);

            // All offsets below are rebased so they're relative to the start
            // of the context excerpt rather than the whole buffer.
            let inputs = zeta_prompt::ZetaPromptInput {
                events,
                related_files,
                cursor_offset_in_excerpt: cursor_point.to_offset(&snapshot)
                    - context_offset_range.start,
                cursor_path: full_path.clone(),
                cursor_excerpt: snapshot
                    .text_for_range(context_range)
                    .collect::<String>()
                    .into(),
                editable_range_in_excerpt: (editable_offset_range.start
                    - context_offset_range.start)
                    ..(editable_offset_range.end - context_offset_range.start),
                excerpt_start_row: Some(context_start_row),
            };

            let prompt = build_prompt(&inputs);

            // Notify any debug listener that a prediction has started,
            // including the exact prompt; send failures are ignored.
            if let Some(debug_tx) = &debug_tx {
                debug_tx
                    .unbounded_send(DebugEvent::EditPredictionStarted(
                        EditPredictionStartedDebugEvent {
                            buffer: active_buffer.downgrade(),
                            prompt: Some(prompt.clone()),
                            position,
                        },
                    ))
                    .ok();
            }

            // Mercury speaks an OpenAI-compatible protocol; non-streaming,
            // single user message carrying the whole prompt.
            let request_body = open_ai::Request {
                model: "mercury-coder".into(),
                messages: vec![open_ai::RequestMessage::User {
                    content: open_ai::MessageContent::Plain(prompt),
                }],
                stream: false,
                max_completion_tokens: None,
                stop: vec![],
                temperature: None,
                tool_choice: None,
                parallel_tool_calls: None,
                tools: vec![],
                prompt_cache_key: None,
                reasoning_effort: None,
            };

            let buf = serde_json::to_vec(&request_body)?;
            let body: AsyncBody = buf.into();

            let request = http_client::Request::builder()
                .uri(MERCURY_API_URL)
                .header("Content-Type", "application/json")
                .header("Authorization", format!("Bearer {}", api_token))
                .header("Connection", "keep-alive")
                .method(Method::POST)
                .body(body)
                .context("Failed to create request")?;

            let mut response = http_client
                .send(request)
                .await
                .context("Failed to send request")?;

            // Read the full body before checking the status so an error
            // response's payload can be included in the failure message.
            let mut body: Vec<u8> = Vec::new();
            response
                .body_mut()
                .read_to_end(&mut body)
                .await
                .context("Failed to read response body")?;

            let response_received_at = Instant::now();
            if !response.status().is_success() {
                anyhow::bail!(
                    "Request failed with status: {:?}\nBody: {}",
                    response.status(),
                    String::from_utf8_lossy(&body),
                );
            };

            let mut response: open_ai::Response =
                serde_json::from_slice(&body).context("Failed to parse response")?;

            // Take the response id out before consuming the response struct;
            // it becomes the prediction's id.
            let id = mem::take(&mut response.id);
            let response_str = text_from_response(response).unwrap_or_default();

            if let Some(debug_tx) = &debug_tx {
                debug_tx
                    .unbounded_send(DebugEvent::EditPredictionFinished(
                        EditPredictionFinishedDebugEvent {
                            buffer: active_buffer.downgrade(),
                            model_output: Some(response_str.clone()),
                            position,
                        },
                    ))
                    .ok();
            }

            // Strip a plain markdown code fence if the model wrapped its
            // output in one. NOTE(review): a language-tagged fence
            // (```rust\n…) would not be stripped — presumably the model only
            // emits bare fences; confirm against real responses.
            let response_str = response_str.strip_prefix("```\n").unwrap_or(&response_str);
            let response_str = response_str.strip_suffix("\n```").unwrap_or(&response_str);

            let mut edits = Vec::new();
            // The model outputs the literal string "None" when it has no
            // prediction to offer.
            const NO_PREDICTION_OUTPUT: &str = "None";

            if response_str != NO_PREDICTION_OUTPUT {
                // Diff the model's rewrite against the original editable text
                // to produce minimal buffer edits anchored at the window start.
                let old_text = snapshot
                    .text_for_range(editable_offset_range.clone())
                    .collect::<String>();
                edits = compute_edits(
                    old_text,
                    &response_str,
                    editable_offset_range.start,
                    &snapshot,
                );
            }

            anyhow::Ok((id, edits, snapshot, response_received_at, inputs))
        });

        // Back on the foreground: package everything into an
        // `EditPredictionResult` against the snapshot the edits were computed
        // from.
        cx.spawn(async move |cx| {
            let (id, edits, old_snapshot, response_received_at, inputs) =
                result.await.context("Mercury edit prediction failed")?;
            anyhow::Ok(Some(
                EditPredictionResult::new(
                    EditPredictionId(id.into()),
                    &buffer,
                    &old_snapshot,
                    edits.into(),
                    None,
                    buffer_snapshotted_at,
                    response_received_at,
                    inputs,
                    cx,
                )
                .await,
            ))
        })
    }
}
220
221fn build_prompt(inputs: &ZetaPromptInput) -> String {
222 const RECENTLY_VIEWED_SNIPPETS_START: &str = "<|recently_viewed_code_snippets|>\n";
223 const RECENTLY_VIEWED_SNIPPETS_END: &str = "<|/recently_viewed_code_snippets|>\n";
224 const RECENTLY_VIEWED_SNIPPET_START: &str = "<|recently_viewed_code_snippet|>\n";
225 const RECENTLY_VIEWED_SNIPPET_END: &str = "<|/recently_viewed_code_snippet|>\n";
226 const CURRENT_FILE_CONTENT_START: &str = "<|current_file_content|>\n";
227 const CURRENT_FILE_CONTENT_END: &str = "<|/current_file_content|>\n";
228 const CODE_TO_EDIT_START: &str = "<|code_to_edit|>\n";
229 const CODE_TO_EDIT_END: &str = "<|/code_to_edit|>\n";
230 const EDIT_DIFF_HISTORY_START: &str = "<|edit_diff_history|>\n";
231 const EDIT_DIFF_HISTORY_END: &str = "<|/edit_diff_history|>\n";
232 const CURSOR_TAG: &str = "<|cursor|>";
233 const CODE_SNIPPET_FILE_PATH_PREFIX: &str = "code_snippet_file_path: ";
234 const CURRENT_FILE_PATH_PREFIX: &str = "current_file_path: ";
235
236 let mut prompt = String::new();
237
238 push_delimited(
239 &mut prompt,
240 RECENTLY_VIEWED_SNIPPETS_START..RECENTLY_VIEWED_SNIPPETS_END,
241 |prompt| {
242 for related_file in inputs.related_files.iter() {
243 for related_excerpt in &related_file.excerpts {
244 push_delimited(
245 prompt,
246 RECENTLY_VIEWED_SNIPPET_START..RECENTLY_VIEWED_SNIPPET_END,
247 |prompt| {
248 prompt.push_str(CODE_SNIPPET_FILE_PATH_PREFIX);
249 prompt.push_str(related_file.path.to_string_lossy().as_ref());
250 prompt.push('\n');
251 prompt.push_str(related_excerpt.text.as_ref());
252 },
253 );
254 }
255 }
256 },
257 );
258
259 push_delimited(
260 &mut prompt,
261 CURRENT_FILE_CONTENT_START..CURRENT_FILE_CONTENT_END,
262 |prompt| {
263 prompt.push_str(CURRENT_FILE_PATH_PREFIX);
264 prompt.push_str(inputs.cursor_path.as_os_str().to_string_lossy().as_ref());
265 prompt.push('\n');
266
267 prompt.push_str(&inputs.cursor_excerpt[0..inputs.editable_range_in_excerpt.start]);
268 push_delimited(prompt, CODE_TO_EDIT_START..CODE_TO_EDIT_END, |prompt| {
269 prompt.push_str(
270 &inputs.cursor_excerpt
271 [inputs.editable_range_in_excerpt.start..inputs.cursor_offset_in_excerpt],
272 );
273 prompt.push_str(CURSOR_TAG);
274 prompt.push_str(
275 &inputs.cursor_excerpt
276 [inputs.cursor_offset_in_excerpt..inputs.editable_range_in_excerpt.end],
277 );
278 });
279 prompt.push_str(&inputs.cursor_excerpt[inputs.editable_range_in_excerpt.end..]);
280 },
281 );
282
283 push_delimited(
284 &mut prompt,
285 EDIT_DIFF_HISTORY_START..EDIT_DIFF_HISTORY_END,
286 |prompt| {
287 for event in inputs.events.iter() {
288 zeta_prompt::write_event(prompt, &event);
289 }
290 },
291 );
292
293 prompt
294}
295
/// Appends `cb`'s output to `prompt`, wrapped between the range's start and
/// end delimiter strings, with a newline inserted before the end delimiter.
fn push_delimited(prompt: &mut String, delimiters: Range<&str>, cb: impl FnOnce(&mut String)) {
    let Range { start, end } = delimiters;
    prompt.push_str(start);
    cb(prompt);
    prompt.push_str("\n");
    prompt.push_str(end);
}
302
/// Key under which the Mercury API token is stored/looked up in
/// `ApiKeyState`. Note: intentionally the same URL string as
/// `MERCURY_API_URL`.
pub const MERCURY_CREDENTIALS_URL: SharedString =
    SharedString::new_static("https://api.inceptionlabs.ai/v1/edit/completions");
// Username component for the stored credential — presumably used by callers
// persisting the token; not referenced elsewhere in this file.
pub const MERCURY_CREDENTIALS_USERNAME: &str = "mercury-api-token";
/// Environment variable that can supply the Mercury API token directly.
pub static MERCURY_TOKEN_ENV_VAR: std::sync::LazyLock<EnvVar> = env_var!("MERCURY_AI_TOKEN");
307
// Newtype wrapper registering the shared API key state as a gpui global, so
// every `Mercury` instance in the process reuses one `ApiKeyState` entity.
struct GlobalMercuryApiKey(Entity<ApiKeyState>);

impl Global for GlobalMercuryApiKey {}
311
312pub fn mercury_api_token(cx: &mut App) -> Entity<ApiKeyState> {
313 if let Some(global) = cx.try_global::<GlobalMercuryApiKey>() {
314 return global.0.clone();
315 }
316 let entity =
317 cx.new(|_| ApiKeyState::new(MERCURY_CREDENTIALS_URL, MERCURY_TOKEN_ENV_VAR.clone()));
318 cx.set_global(GlobalMercuryApiKey(entity.clone()));
319 entity
320}
321
322pub fn load_mercury_api_token(cx: &mut App) -> Task<Result<(), language_model::AuthenticateError>> {
323 mercury_api_token(cx).update(cx, |key_state, cx| {
324 key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, cx)
325 })
326}