predict.rs

use crate::example::{ActualExcerpt, NamedExample};
use crate::headless::ZetaCliAppState;
use crate::paths::{CACHE_DIR, LATEST_EXAMPLE_RUN_DIR, RUN_DIR, print_run_data_dir};
use crate::{
    CacheMode, PredictArguments, PredictionOptions, PredictionProvider, PredictionsOutputFormat,
};
use ::serde::Serialize;
use anyhow::{Context, Result, anyhow};
use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock};
use futures::StreamExt as _;
use gpui::{AppContext, AsyncApp, Entity};
use project::Project;
use project::buffer_store::BufferStoreEvent;
use serde::Deserialize;
use std::fs;
use std::io::{IsTerminal, Write};
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use std::time::{Duration, Instant};
use zeta::{EvalCache, EvalCacheEntryKind, EvalCacheKey, Zeta};

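/// Entry point for the `predict` command: loads the named example, applies its
/// edit history, requests a prediction, and writes the result to stdout.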
pub async fn run_predict(
    args: PredictArguments,
    app_state: &Arc<ZetaCliAppState>,
    cx: &mut AsyncApp,
) {
    let example = NamedExample::load(args.example_path).unwrap();
    let project = example.setup_project(app_state, cx).await.unwrap();
    let zeta = setup_zeta(args.options.provider, &project, app_state, cx).unwrap();
    let _edited_buffers = example.apply_edit_history(&project, cx).await.unwrap();
    let result = perform_predict(example, project, zeta, None, args.options, cx)
        .await
        .unwrap();
    result.write(args.format, std::io::stdout()).unwrap();

    print_run_data_dir(true, std::io::stdout().is_terminal());
}

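/// Creates a `Zeta` entity configured for the requested prediction provider and
/// registers every buffer added to the project's buffer store with it.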
pub fn setup_zeta(
    provider: PredictionProvider,
    project: &Entity<Project>,
    app_state: &Arc<ZetaCliAppState>,
    cx: &mut AsyncApp,
) -> Result<Entity<Zeta>> {
    let zeta =
        cx.new(|cx| zeta::Zeta::new(app_state.client.clone(), app_state.user_store.clone(), cx))?;

    zeta.update(cx, |zeta, _cx| {
        let model = match provider {
            PredictionProvider::Zeta1 => zeta::ZetaEditPredictionModel::Zeta1,
            PredictionProvider::Zeta2 => zeta::ZetaEditPredictionModel::Zeta2,
            PredictionProvider::Sweep => zeta::ZetaEditPredictionModel::Sweep,
        };
        zeta.set_edit_prediction_model(model);
    })?;

    let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone())?;

    cx.subscribe(&buffer_store, {
        let project = project.clone();
        let zeta = zeta.clone();
        move |_, event, cx| match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                zeta.update(cx, |zeta, cx| zeta.register_buffer(&buffer, &project, cx));
            }
            _ => {}
        }
    })?
    .detach();

    anyhow::Ok(zeta)
}

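/// Runs a single prediction for `example`, saving prompts, responses, and
/// timings under the example's run directory. `repetition_ix` distinguishes
/// repeated runs of the same example; repeated runs always skip the cache.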
pub async fn perform_predict(
    example: NamedExample,
    project: Entity<Project>,
    zeta: Entity<Zeta>,
    repetition_ix: Option<u16>,
    options: PredictionOptions,
    cx: &mut AsyncApp,
) -> Result<PredictionDetails> {
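    // Resolve the effective cache mode: repetitions only make sense when
    // requests are re-issued, so `Auto` falls back to `Skip` for repeated runs
    // and to `Requests` otherwise.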
    let mut cache_mode = options.cache;
    if repetition_ix.is_some() {
        if cache_mode != CacheMode::Auto && cache_mode != CacheMode::Skip {
            panic!("Repetitions are only supported with the Auto or Skip cache modes");
        } else {
            cache_mode = CacheMode::Skip;
        }
    } else if cache_mode == CacheMode::Auto {
        cache_mode = CacheMode::Requests;
    }

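    // Each run writes its artifacts into a per-example directory (optionally
    // per repetition), and the "latest" symlink is repointed at it.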
    let mut example_run_dir = RUN_DIR.join(&example.file_name());
    if let Some(repetition_ix) = repetition_ix {
        example_run_dir = example_run_dir.join(format!("{:03}", repetition_ix));
    }
    fs::create_dir_all(&example_run_dir)?;
    if LATEST_EXAMPLE_RUN_DIR.is_symlink() {
        fs::remove_file(&*LATEST_EXAMPLE_RUN_DIR)?;
    }

    #[cfg(unix)]
    std::os::unix::fs::symlink(&example_run_dir, &*LATEST_EXAMPLE_RUN_DIR)
        .context("creating latest link")?;

    #[cfg(windows)]
    std::os::windows::fs::symlink_dir(&example_run_dir, &*LATEST_EXAMPLE_RUN_DIR)
        .context("creating latest link")?;

    zeta.update(cx, |zeta, _cx| {
        zeta.with_eval_cache(Arc::new(RunCache {
            example_run_dir: example_run_dir.clone(),
            cache_mode,
        }));
    })?;

    let (cursor_buffer, cursor_anchor) = example.cursor_position(&project, cx).await?;

    let result = Arc::new(Mutex::new(PredictionDetails::new(example_run_dir.clone())));

    let prompt_format = options.zeta2.prompt_format;

    zeta.update(cx, |zeta, _cx| {
        let mut options = zeta.options().clone();
        options.prompt_format = prompt_format.into();
        zeta.set_options(options);
    })?;

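    // For the Zeta2 provider, spawn a background task that consumes Zeta's
    // debug event stream, saving the search and prediction prompts and the
    // model response to the run directory while recording timings and the
    // excerpts included in the prompt.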
    let mut debug_task = gpui::Task::ready(Ok(()));

    if options.provider == crate::PredictionProvider::Zeta2 {
        let mut debug_rx = zeta.update(cx, |zeta, _| zeta.debug_info())?;

        debug_task = cx.background_spawn({
            let result = result.clone();
            async move {
                let mut start_time = None;
                let mut retrieval_finished_at = None;
                while let Some(event) = debug_rx.next().await {
                    match event {
                        zeta::ZetaDebugInfo::ContextRetrievalStarted(info) => {
                            start_time = Some(info.timestamp);
                            fs::write(
                                example_run_dir.join("search_prompt.md"),
                                &info.search_prompt,
                            )?;
                        }
                        zeta::ZetaDebugInfo::ContextRetrievalFinished(info) => {
                            retrieval_finished_at = Some(info.timestamp);
                            for (key, value) in &info.metadata {
                                if *key == "search_queries" {
                                    fs::write(
                                        example_run_dir.join("search_queries.json"),
                                        value.as_bytes(),
                                    )?;
                                }
                            }
                        }
                        zeta::ZetaDebugInfo::EditPredictionRequested(request) => {
                            let prediction_started_at = Instant::now();
                            start_time.get_or_insert(prediction_started_at);
                            let prompt = request.local_prompt.unwrap_or_default();
                            fs::write(example_run_dir.join("prediction_prompt.md"), &prompt)?;

                            {
                                let mut result = result.lock().unwrap();
                                result.prompt_len = prompt.chars().count();

                                for included_file in request.inputs.included_files {
                                    let insertions =
                                        vec![(request.inputs.cursor_point, CURSOR_MARKER)];
                                    result.excerpts.extend(included_file.excerpts.iter().map(
                                        |excerpt| ActualExcerpt {
                                            path: included_file.path.components().skip(1).collect(),
                                            text: String::from(excerpt.text.as_ref()),
                                        },
                                    ));
                                    write_codeblock(
                                        &included_file.path,
                                        included_file.excerpts.iter(),
                                        if included_file.path == request.inputs.cursor_path {
                                            &insertions
                                        } else {
                                            &[]
                                        },
                                        included_file.max_row,
                                        false,
                                        &mut result.excerpts_text,
                                    );
                                }
                            }

                            let response =
                                request.response_rx.await?.0.map_err(|err| anyhow!(err))?;
                            let response = zeta::text_from_response(response).unwrap_or_default();
                            let prediction_finished_at = Instant::now();
                            fs::write(example_run_dir.join("prediction_response.md"), &response)?;

                            let mut result = result.lock().unwrap();
                            result.generated_len = response.chars().count();
                            result.retrieval_time =
                                retrieval_finished_at.unwrap() - start_time.unwrap();
                            result.prediction_time = prediction_finished_at - prediction_started_at;
                            result.total_time = prediction_finished_at - start_time.unwrap();

                            break;
                        }
                    }
                }
                anyhow::Ok(())
            }
        });

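        // Agentic context retrieval must complete before the prediction
        // request so the retrieved excerpts can be included in the prompt.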
        zeta.update(cx, |zeta, cx| {
            zeta.refresh_context_with_agentic_retrieval(
                project.clone(),
                cursor_buffer.clone(),
                cursor_anchor,
                cx,
            )
        })?
        .await?;
    }

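    // Request the edit prediction at the example's cursor position, then wait
    // for the debug task to finish collecting its measurements.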
    let prediction = zeta
        .update(cx, |zeta, cx| {
            zeta.request_prediction(
                &project,
                &cursor_buffer,
                cursor_anchor,
                cloud_llm_client::PredictEditsRequestTrigger::Cli,
                cx,
            )
        })?
        .await?;

    debug_task.await?;

    let mut result = Arc::into_inner(result).unwrap().into_inner().unwrap();

    result.diff = prediction
        .and_then(|prediction| {
            let prediction = prediction.prediction.ok()?;
            prediction.edit_preview.as_unified_diff(&prediction.edits)
        })
        .unwrap_or_default();

    anyhow::Ok(result)
}

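/// File-backed cache of search and LLM requests shared across runs. Entries
/// live under `CACHE_DIR` and are hard-linked into each example's run
/// directory so a run records exactly which cache entries it used.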
struct RunCache {
    cache_mode: CacheMode,
    example_run_dir: PathBuf,
}

impl RunCache {
    fn output_cache_path((kind, key): &EvalCacheKey) -> PathBuf {
        CACHE_DIR.join(format!("{kind}_out_{key:x}.json"))
    }

    fn input_cache_path((kind, key): &EvalCacheKey) -> PathBuf {
        CACHE_DIR.join(format!("{kind}_in_{key:x}.json"))
    }

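    /// Hard-links the cached input and output files for `key` into the
    /// example's run directory.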
    fn link_to_run(&self, key: &EvalCacheKey) {
        let output_link_path = self.example_run_dir.join(format!("{}_out.json", key.0));
        fs::hard_link(Self::output_cache_path(key), &output_link_path).unwrap();

        let input_link_path = self.example_run_dir.join(format!("{}_in.json", key.0));
        fs::hard_link(Self::input_cache_path(key), &input_link_path).unwrap();
    }
}

impl EvalCache for RunCache {
    fn read(&self, key: EvalCacheKey) -> Option<String> {
        let path = RunCache::output_cache_path(&key);

        if path.exists() {
            let use_cache = match key.0 {
                EvalCacheEntryKind::Search => self.cache_mode.use_cached_search_results(),
                EvalCacheEntryKind::Context | EvalCacheEntryKind::Prediction => {
                    self.cache_mode.use_cached_llm_responses()
                }
            };
            if use_cache {
                log::info!("Using cache entry: {}", path.display());
                self.link_to_run(&key);
                Some(fs::read_to_string(path).unwrap())
            } else {
                log::trace!("Skipping cached entry: {}", path.display());
                None
            }
        } else if matches!(self.cache_mode, CacheMode::Force) {
            panic!(
                "No cached entry found for {:?}. Run without `--cache force` at least once.",
                key.0
            );
        } else {
            None
        }
    }

    fn write(&self, key: EvalCacheKey, input: &str, output: &str) {
        fs::create_dir_all(&*CACHE_DIR).unwrap();

        let input_path = RunCache::input_cache_path(&key);
        fs::write(&input_path, input).unwrap();

        let output_path = RunCache::output_cache_path(&key);
        log::trace!("Writing cache entry: {}", output_path.display());
        fs::write(&output_path, output).unwrap();

        self.link_to_run(&key);
    }
}

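/// Everything captured for a single prediction run: the predicted diff, the
/// excerpts included in the prompt, timing breakdowns, and prompt/response sizes.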
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PredictionDetails {
    pub diff: String,
    pub excerpts: Vec<ActualExcerpt>,
    pub excerpts_text: String, // TODO: contains the worktree root path. Drop this field and compute it on the fly
    pub retrieval_time: Duration,
    pub prediction_time: Duration,
    pub total_time: Duration,
    pub run_example_dir: PathBuf,
    pub prompt_len: usize,
    pub generated_len: usize,
}

impl PredictionDetails {
    pub fn new(run_example_dir: PathBuf) -> Self {
        Self {
            diff: Default::default(),
            excerpts: Default::default(),
            excerpts_text: Default::default(),
            retrieval_time: Default::default(),
            prediction_time: Default::default(),
            total_time: Default::default(),
            run_example_dir,
            prompt_len: 0,
            generated_len: 0,
        }
    }

    pub fn write(&self, format: PredictionsOutputFormat, mut out: impl Write) -> Result<()> {
        let formatted = match format {
            PredictionsOutputFormat::Md => self.to_markdown(),
            PredictionsOutputFormat::Json => serde_json::to_string_pretty(self)?,
            PredictionsOutputFormat::Diff => self.diff.clone(),
        };

        Ok(out.write_all(formatted.as_bytes())?)
    }

    pub fn to_markdown(&self) -> String {
        format!(
            "## Excerpts\n\n\
            {}\n\n\
            ## Prediction\n\n\
            {}\n\n\
            ## Time\n\n\
            Retrieval: {}ms\n\
            Prediction: {}ms\n\n\
            Total: {}ms\n",
            self.excerpts_text,
            self.diff,
            self.retrieval_time.as_millis(),
            self.prediction_time.as_millis(),
            self.total_time.as_millis(),
        )
    }
}