edit prediction cli: Cargo-style progress output (#44675)

Created by Agus Zubiaga

Release Notes:

- N/A

Change summary

Cargo.lock                                         |   1 
crates/edit_prediction_cli/Cargo.toml              |   1 
crates/edit_prediction_cli/src/format_prompt.rs    |   8 
crates/edit_prediction_cli/src/load_project.rs     |  49 +
crates/edit_prediction_cli/src/main.rs             |  47 +
crates/edit_prediction_cli/src/predict.rs          |  46 +
crates/edit_prediction_cli/src/progress.rs         | 372 ++++++++++++++++
crates/edit_prediction_cli/src/retrieve_context.rs |  37 
crates/edit_prediction_cli/src/score.rs            |   5 
9 files changed, 509 insertions(+), 57 deletions(-)

Detailed changes

Cargo.lock

@@ -5179,6 +5179,7 @@ dependencies = [
  "language_model",
  "language_models",
  "languages",
+ "libc",
  "log",
  "node_runtime",
  "paths",

crates/edit_prediction_cli/Cargo.toml

@@ -34,6 +34,7 @@ language_extension.workspace = true
 language_model.workspace = true
 language_models.workspace = true
 languages = { workspace = true, features = ["load-grammars"] }
+libc.workspace = true
 log.workspace = true
 node_runtime.workspace = true
 paths.workspace = true

crates/edit_prediction_cli/src/format_prompt.rs

@@ -3,6 +3,7 @@ use crate::{
     example::{Example, ExamplePrompt},
     headless::EpAppState,
     load_project::run_load_project,
+    progress::{Progress, Step},
     retrieve_context::run_context_retrieval,
 };
 use edit_prediction::{
@@ -17,9 +18,12 @@ pub async fn run_format_prompt(
     example: &mut Example,
     prompt_format: PromptFormat,
     app_state: Arc<EpAppState>,
+    progress: Arc<Progress>,
     mut cx: AsyncApp,
 ) {
-    run_context_retrieval(example, app_state.clone(), cx.clone()).await;
+    run_context_retrieval(example, app_state.clone(), progress.clone(), cx.clone()).await;
+
+    let _step_progress = progress.start(Step::FormatPrompt, &example.name);
 
     match prompt_format {
         PromptFormat::Teacher => {
@@ -31,7 +35,7 @@ pub async fn run_format_prompt(
             });
         }
         PromptFormat::Zeta2 => {
-            run_load_project(example, app_state, cx.clone()).await;
+            run_load_project(example, app_state, progress.clone(), cx.clone()).await;
 
             let ep_store = cx
                 .update(|cx| EditPredictionStore::try_global(cx).unwrap())
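
The `_step_progress` guard returned by `Progress::start` is what drives the new output: the step stays in the live status area while the guard is alive and moves to the completed log, stamped with its elapsed time, when it drops (see the `Drop` impl in `progress.rs` below). A minimal sketch of that pattern, assuming this PR's module paths; `format_one` and `do_formatting` are hypothetical stand-ins:

```rust
use std::sync::Arc;

use crate::progress::{Progress, Step};

// Hypothetical caller; `do_formatting` stands in for the real prompt work.
async fn format_one(example_name: &str, progress: Arc<Progress>) {
    // Starting a step adds it to the live status area on stderr.
    let _step = progress.start(Step::FormatPrompt, example_name);

    do_formatting(example_name).await;

    // When `_step` drops here, `StepProgress::drop` calls `Progress::finish`,
    // which prints the completed "Format <name>" line with its duration.
}

async fn do_formatting(_example_name: &str) {}
```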

crates/edit_prediction_cli/src/load_project.rs

@@ -2,6 +2,7 @@ use crate::{
     example::{Example, ExampleBuffer, ExampleState},
     headless::EpAppState,
     paths::{REPOS_DIR, WORKTREES_DIR},
+    progress::{InfoStyle, Progress, Step, StepProgress},
 };
 use anyhow::{Result, anyhow};
 use collections::HashMap;
@@ -24,30 +25,47 @@ use std::{
 use util::{paths::PathStyle, rel_path::RelPath};
 use zeta_prompt::CURSOR_MARKER;
 
-pub async fn run_load_project(example: &mut Example, app_state: Arc<EpAppState>, mut cx: AsyncApp) {
+pub async fn run_load_project(
+    example: &mut Example,
+    app_state: Arc<EpAppState>,
+    progress: Arc<Progress>,
+    mut cx: AsyncApp,
+) {
     if example.state.is_some() {
         return;
     }
 
-    let project = setup_project(example, &app_state, &mut cx).await;
+    let progress = progress.start(Step::LoadProject, &example.name);
+
+    let project = setup_project(example, &app_state, &progress, &mut cx).await;
 
     let _open_buffers = apply_edit_history(example, &project, &mut cx)
         .await
         .unwrap();
 
     let (buffer, cursor_position) = cursor_position(example, &project, &mut cx).await;
-    example.buffer = buffer
+    let (example_buffer, language_name) = buffer
         .read_with(&cx, |buffer, _cx| {
             let cursor_point = cursor_position.to_point(&buffer);
-            Some(ExampleBuffer {
-                content: buffer.text(),
-                cursor_row: cursor_point.row,
-                cursor_column: cursor_point.column,
-                cursor_offset: cursor_position.to_offset(&buffer),
-            })
+            let language_name = buffer
+                .language()
+                .map(|l| l.name().to_string())
+                .unwrap_or_else(|| "Unknown".to_string());
+            (
+                ExampleBuffer {
+                    content: buffer.text(),
+                    cursor_row: cursor_point.row,
+                    cursor_column: cursor_point.column,
+                    cursor_offset: cursor_position.to_offset(&buffer),
+                },
+                language_name,
+            )
         })
         .unwrap();
 
+    progress.set_info(language_name, InfoStyle::Normal);
+
+    example.buffer = Some(example_buffer);
     example.state = Some(ExampleState {
         buffer,
         project,
@@ -131,13 +149,14 @@ async fn cursor_position(
 async fn setup_project(
     example: &mut Example,
     app_state: &Arc<EpAppState>,
+    step_progress: &Arc<StepProgress>,
     cx: &mut AsyncApp,
 ) -> Entity<Project> {
     let ep_store = cx
         .update(|cx| EditPredictionStore::try_global(cx).unwrap())
         .unwrap();
 
-    let worktree_path = setup_worktree(example).await;
+    let worktree_path = setup_worktree(example, step_progress).await;
 
     if let Some(project) = app_state.project_cache.get(&example.repository_url) {
         ep_store
@@ -158,7 +177,7 @@ async fn setup_project(
                 .update(cx, |buffer, cx| buffer.reload(cx))
                 .unwrap()
                 .await
-                .unwrap();
+                .ok();
         }
         return project;
     }
@@ -208,7 +227,7 @@ async fn setup_project(
     project
 }
 
-pub async fn setup_worktree(example: &Example) -> PathBuf {
+async fn setup_worktree(example: &Example, step_progress: &Arc<StepProgress>) -> PathBuf {
     let (repo_owner, repo_name) = example.repo_name().expect("failed to get repo name");
     let repo_dir = REPOS_DIR.join(repo_owner.as_ref()).join(repo_name.as_ref());
     let worktree_path = WORKTREES_DIR
@@ -217,7 +236,7 @@ pub async fn setup_worktree(example: &Example) -> PathBuf {
     let repo_lock = lock_repo(&repo_dir).await;
 
     if !repo_dir.is_dir() {
-        eprintln!("Cloning repository {}", example.repository_url);
+        step_progress.set_substatus(format!("cloning {}", repo_name));
         fs::create_dir_all(&repo_dir).unwrap();
         run_git(&repo_dir, &["init"]).await.unwrap();
         run_git(
@@ -237,6 +256,7 @@ pub async fn setup_worktree(example: &Example) -> PathBuf {
     let revision = if let Ok(revision) = revision {
         revision
     } else {
+        step_progress.set_substatus("fetching");
         if run_git(
             &repo_dir,
             &["fetch", "--depth", "1", "origin", &example.revision],
@@ -253,6 +273,7 @@ pub async fn setup_worktree(example: &Example) -> PathBuf {
     };
 
     // Create the worktree for this example if needed.
+    step_progress.set_substatus("preparing worktree");
     if worktree_path.is_dir() {
         run_git(&worktree_path, &["clean", "--force", "-d"])
             .await
@@ -288,6 +309,7 @@ pub async fn setup_worktree(example: &Example) -> PathBuf {
 
     // Apply the uncommitted diff for this example.
     if !example.uncommitted_diff.is_empty() {
+        step_progress.set_substatus("applying diff");
         let mut apply_process = smol::process::Command::new("git")
             .current_dir(&worktree_path)
             .args(&["apply", "-"])
@@ -314,6 +336,7 @@ pub async fn setup_worktree(example: &Example) -> PathBuf {
         }
     }
 
+    step_progress.clear_substatus();
     worktree_path
 }
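
`setup_worktree` now narrates each git phase through `set_substatus` instead of `eprintln!`, so the slow parts (clone, fetch, worktree prep, diff apply) show up inline on the example's status line and disappear once done. A reduced sketch of the same idea, assuming this PR's module paths; `prepare_checkout`, `run_load`, and the elided git commands are placeholders:

```rust
use std::sync::Arc;

use crate::progress::{Progress, Step, StepProgress};

// Hypothetical helper mirroring `setup_worktree`: each long-running phase
// rewrites the in-place substatus instead of printing a new log line.
async fn prepare_checkout(step: &Arc<StepProgress>, repo_name: &str) {
    step.set_substatus(format!("cloning {repo_name}"));
    // ... run `git clone` / `git fetch` here ...

    step.set_substatus("preparing worktree");
    // ... run `git worktree add` and `git clean` here ...

    // Blank the substatus once the slow work is finished.
    step.clear_substatus();
}

async fn run_load(example_name: &str, repo_name: &str, progress: Arc<Progress>) {
    let step = progress.start(Step::LoadProject, example_name);
    prepare_checkout(&step, repo_name).await;
    // Dropping `step` records the completed "Load" line with its duration.
}
```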
 

crates/edit_prediction_cli/src/main.rs

@@ -7,6 +7,7 @@ mod load_project;
 mod metrics;
 mod paths;
 mod predict;
+mod progress;
 mod retrieve_context;
 mod score;
 
@@ -15,8 +16,6 @@ use edit_prediction::EditPredictionStore;
 use gpui::Application;
 use reqwest_client::ReqwestClient;
 use serde::{Deserialize, Serialize};
-use std::sync::atomic::AtomicUsize;
-use std::sync::atomic::Ordering::SeqCst;
 use std::{path::PathBuf, sync::Arc};
 
 use crate::distill::run_distill;
@@ -24,6 +23,7 @@ use crate::example::{group_examples_by_repo, read_examples, write_examples};
 use crate::format_prompt::run_format_prompt;
 use crate::load_project::run_load_project;
 use crate::predict::run_prediction;
+use crate::progress::Progress;
 use crate::retrieve_context::run_context_retrieval;
 use crate::score::run_scoring;
 
@@ -112,7 +112,7 @@ impl EpArgs {
 }
 
 fn main() {
-    zlog::init();
+    let _ = zlog::try_init(Some("error".into()));
     zlog::init_output_stderr();
     let args = EpArgs::parse();
 
@@ -151,33 +151,41 @@ fn main() {
                 predict::sync_batches(&args.provider).await
             };
 
-            let example_count = examples.len();
-            let example_ix = AtomicUsize::new(0);
-            let mut grouped_examples = group_examples_by_repo(&mut examples);
+            let total_examples = examples.len();
+            let progress = Progress::new(total_examples);
 
+            let mut grouped_examples = group_examples_by_repo(&mut examples);
             let example_batches = grouped_examples.chunks_mut(args.max_parallelism);
+
             for example_batch in example_batches {
                 let futures = example_batch.into_iter().map(|repo_examples| async {
                     for example in repo_examples.iter_mut() {
-                        eprintln!(
-                            "Processing example: {}/{}",
-                            example_ix.load(SeqCst) + 1,
-                            example_count
-                        );
-                        example_ix.fetch_add(1, SeqCst);
                         match &command {
                             Command::ParseExample => {}
                             Command::LoadProject => {
-                                run_load_project(example, app_state.clone(), cx.clone()).await;
+                                run_load_project(
+                                    example,
+                                    app_state.clone(),
+                                    progress.clone(),
+                                    cx.clone(),
+                                )
+                                .await;
                             }
                             Command::Context => {
-                                run_context_retrieval(example, app_state.clone(), cx.clone()).await;
+                                run_context_retrieval(
+                                    example,
+                                    app_state.clone(),
+                                    progress.clone(),
+                                    cx.clone(),
+                                )
+                                .await;
                             }
                             Command::FormatPrompt(args) => {
                                 run_format_prompt(
                                     example,
                                     args.prompt_format,
                                     app_state.clone(),
+                                    progress.clone(),
                                     cx.clone(),
                                 )
                                 .await;
@@ -188,6 +196,7 @@ fn main() {
                                     Some(args.provider),
                                     args.repetitions,
                                     app_state.clone(),
+                                    progress.clone(),
                                     cx.clone(),
                                 )
                                 .await;
@@ -196,7 +205,14 @@ fn main() {
                                 run_distill(example).await;
                             }
                             Command::Score(args) | Command::Eval(args) => {
-                                run_scoring(example, &args, app_state.clone(), cx.clone()).await;
+                                run_scoring(
+                                    example,
+                                    &args,
+                                    app_state.clone(),
+                                    progress.clone(),
+                                    cx.clone(),
+                                )
+                                .await;
                             }
                             Command::Clean => {
                                 unreachable!()
@@ -206,6 +222,7 @@ fn main() {
                 });
                 futures::future::join_all(futures).await;
             }
+            progress.clear();
 
             if args.output.is_some() || !matches!(command, Command::Eval(_)) {
                 write_examples(&examples, output.as_ref());
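
In `main`, a single `Progress` is created up front with the total example count, a clone of the `Arc` is threaded into every step, and the live status area is cleared once all batches have finished. A condensed, hypothetical sketch of that lifecycle (the batching, command dispatch, and gpui plumbing are omitted):

```rust
use std::sync::Arc;

use crate::progress::Progress;

// Hypothetical, condensed version of the driver loop in `main`.
async fn run_all(example_names: Vec<String>) {
    // One shared tracker, seeded with the total so the status divider can
    // render its "done/in-progress/total" counter.
    let progress = Progress::new(example_names.len());

    for name in &example_names {
        // Each step calls `progress.start(...)` internally and finishes when
        // its guard drops, so the driver only has to pass the Arc along.
        process_example(name, progress.clone()).await;
    }

    // Erase the remaining live status lines before printing final output.
    progress.clear();
}

async fn process_example(_name: &str, _progress: Arc<Progress>) {}
```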

crates/edit_prediction_cli/src/predict.rs

@@ -6,6 +6,7 @@ use crate::{
     headless::EpAppState,
     load_project::run_load_project,
     paths::{LATEST_EXAMPLE_RUN_DIR, RUN_DIR},
+    progress::{InfoStyle, Progress, Step},
     retrieve_context::run_context_retrieval,
 };
 use edit_prediction::{DebugEvent, EditPredictionStore};
@@ -24,29 +25,41 @@ pub async fn run_prediction(
     provider: Option<PredictionProvider>,
     repetition_count: usize,
     app_state: Arc<EpAppState>,
+    progress: Arc<Progress>,
     mut cx: AsyncApp,
 ) {
     if !example.predictions.is_empty() {
         return;
     }
 
-    run_context_retrieval(example, app_state.clone(), cx.clone()).await;
-
     let provider = provider.unwrap();
 
+    run_context_retrieval(example, app_state.clone(), progress.clone(), cx.clone()).await;
+
     if matches!(
         provider,
         PredictionProvider::Teacher | PredictionProvider::TeacherNonBatching
     ) {
+        let _step_progress = progress.start(Step::Predict, &example.name);
+
         if example.prompt.is_none() {
-            run_format_prompt(example, PromptFormat::Teacher, app_state.clone(), cx).await;
+            run_format_prompt(
+                example,
+                PromptFormat::Teacher,
+                app_state.clone(),
+                progress,
+                cx,
+            )
+            .await;
         }
 
         let batched = matches!(provider, PredictionProvider::Teacher);
         return predict_anthropic(example, repetition_count, batched).await;
     }
 
-    run_load_project(example, app_state.clone(), cx.clone()).await;
+    run_load_project(example, app_state.clone(), progress.clone(), cx.clone()).await;
+
+    let _step_progress = progress.start(Step::Predict, &example.name);
 
     if matches!(
         provider,
@@ -181,18 +194,31 @@ pub async fn run_prediction(
             .await
             .unwrap();
 
+        let actual_patch = prediction
+            .and_then(|prediction| {
+                let prediction = prediction.prediction.ok()?;
+                prediction.edit_preview.as_unified_diff(&prediction.edits)
+            })
+            .unwrap_or_default();
+
+        let has_prediction = !actual_patch.is_empty();
+
         updated_example
             .lock()
             .unwrap()
             .predictions
             .last_mut()
             .unwrap()
-            .actual_patch = prediction
-            .and_then(|prediction| {
-                let prediction = prediction.prediction.ok()?;
-                prediction.edit_preview.as_unified_diff(&prediction.edits)
-            })
-            .unwrap_or_default();
+            .actual_patch = actual_patch;
+
+        if ix == repetition_count - 1 {
+            let (info, style) = if has_prediction {
+                ("predicted", InfoStyle::Normal)
+            } else {
+                ("no prediction", InfoStyle::Warning)
+            };
+            _step_progress.set_info(info, style);
+        }
     }
 
     ep_store
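
`run_prediction` now annotates the completed line through `set_info`, using `InfoStyle::Warning` (rendered in yellow by `print_completed`) when the model produced no edit. A small sketch of that call, assuming this PR's module paths; `record_outcome` is a hypothetical helper:

```rust
use crate::progress::{InfoStyle, StepProgress};

// Hypothetical helper mirroring the end of `run_prediction`: the info text is
// stored on the in-progress task and printed on its completed line.
fn record_outcome(step: &StepProgress, actual_patch: &str) {
    if actual_patch.is_empty() {
        // Highlighted in yellow so empty predictions stand out in the log.
        step.set_info("no prediction", InfoStyle::Warning);
    } else {
        step.set_info("predicted", InfoStyle::Normal);
    }
}
```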

crates/edit_prediction_cli/src/progress.rs

@@ -0,0 +1,372 @@
+use std::{
+    borrow::Cow,
+    collections::HashMap,
+    io::{IsTerminal, Write},
+    sync::{Arc, Mutex},
+    time::{Duration, Instant},
+};
+
+pub struct Progress {
+    inner: Mutex<ProgressInner>,
+}
+
+struct ProgressInner {
+    completed: Vec<CompletedTask>,
+    in_progress: HashMap<String, InProgressTask>,
+    is_tty: bool,
+    terminal_width: usize,
+    max_example_name_len: usize,
+    status_lines_displayed: usize,
+    total_examples: usize,
+}
+
+#[derive(Clone)]
+struct InProgressTask {
+    step: Step,
+    started_at: Instant,
+    substatus: Option<String>,
+    info: Option<(String, InfoStyle)>,
+}
+
+struct CompletedTask {
+    step: Step,
+    example_name: String,
+    duration: Duration,
+    info: Option<(String, InfoStyle)>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum Step {
+    LoadProject,
+    Context,
+    FormatPrompt,
+    Predict,
+    Score,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum InfoStyle {
+    Normal,
+    Warning,
+}
+
+impl Step {
+    pub fn label(&self) -> &'static str {
+        match self {
+            Step::LoadProject => "Load",
+            Step::Context => "Context",
+            Step::FormatPrompt => "Format",
+            Step::Predict => "Predict",
+            Step::Score => "Score",
+        }
+    }
+
+    fn color_code(&self) -> &'static str {
+        match self {
+            Step::LoadProject => "\x1b[33m",
+            Step::Context => "\x1b[35m",
+            Step::FormatPrompt => "\x1b[34m",
+            Step::Predict => "\x1b[32m",
+            Step::Score => "\x1b[31m",
+        }
+    }
+}
+
+const RIGHT_MARGIN: usize = 4;
+
+impl Progress {
+    pub fn new(total_examples: usize) -> Arc<Self> {
+        Arc::new(Self {
+            inner: Mutex::new(ProgressInner {
+                completed: Vec::new(),
+                in_progress: HashMap::new(),
+                is_tty: std::io::stderr().is_terminal(),
+                terminal_width: get_terminal_width(),
+                max_example_name_len: 0,
+                status_lines_displayed: 0,
+                total_examples,
+            }),
+        })
+    }
+
+    pub fn start(self: &Arc<Self>, step: Step, example_name: &str) -> Arc<StepProgress> {
+        {
+            let mut inner = self.inner.lock().unwrap();
+
+            Self::clear_status_lines(&mut inner);
+
+            inner.max_example_name_len = inner.max_example_name_len.max(example_name.len());
+
+            inner.in_progress.insert(
+                example_name.to_string(),
+                InProgressTask {
+                    step,
+                    started_at: Instant::now(),
+                    substatus: None,
+                    info: None,
+                },
+            );
+
+            Self::print_status_lines(&mut inner);
+        }
+
+        Arc::new(StepProgress {
+            progress: self.clone(),
+            step,
+            example_name: example_name.to_string(),
+        })
+    }
+
+    pub fn finish(&self, step: Step, example_name: &str) {
+        let mut inner = self.inner.lock().unwrap();
+
+        let task = inner.in_progress.remove(example_name);
+        if let Some(task) = task {
+            if task.step == step {
+                inner.completed.push(CompletedTask {
+                    step: task.step,
+                    example_name: example_name.to_string(),
+                    duration: task.started_at.elapsed(),
+                    info: task.info,
+                });
+
+                Self::clear_status_lines(&mut inner);
+                Self::print_completed(&inner, inner.completed.last().unwrap());
+                Self::print_status_lines(&mut inner);
+            } else {
+                inner.in_progress.insert(example_name.to_string(), task);
+            }
+        }
+    }
+
+    fn clear_status_lines(inner: &mut ProgressInner) {
+        if inner.is_tty && inner.status_lines_displayed > 0 {
+            // Move up and clear each line we previously displayed
+            for _ in 0..inner.status_lines_displayed {
+                eprint!("\x1b[A\x1b[K");
+            }
+            let _ = std::io::stderr().flush();
+            inner.status_lines_displayed = 0;
+        }
+    }
+
+    fn print_completed(inner: &ProgressInner, task: &CompletedTask) {
+        let duration = format_duration(task.duration);
+        let name_width = inner.max_example_name_len;
+
+        if inner.is_tty {
+            let reset = "\x1b[0m";
+            let bold = "\x1b[1m";
+            let dim = "\x1b[2m";
+
+            let yellow = "\x1b[33m";
+            let info_part = task
+                .info
+                .as_ref()
+                .map(|(s, style)| {
+                    if *style == InfoStyle::Warning {
+                        format!("{yellow}{s}{reset}")
+                    } else {
+                        s.to_string()
+                    }
+                })
+                .unwrap_or_default();
+
+            let prefix = format!(
+                "{bold}{color}{label:>12}{reset} {name:<name_width$} {dim}│{reset} {info_part}",
+                color = task.step.color_code(),
+                label = task.step.label(),
+                name = task.example_name,
+            );
+
+            let duration_with_margin = format!("{duration} ");
+            let padding_needed = inner
+                .terminal_width
+                .saturating_sub(RIGHT_MARGIN)
+                .saturating_sub(duration_with_margin.len())
+                .saturating_sub(strip_ansi_len(&prefix));
+            let padding = " ".repeat(padding_needed);
+
+            eprintln!("{prefix}{padding}{dim}{duration_with_margin}{reset}");
+        } else {
+            let info_part = task
+                .info
+                .as_ref()
+                .map(|(s, _)| format!(" | {}", s))
+                .unwrap_or_default();
+
+            eprintln!(
+                "{label:>12} {name:<name_width$}{info_part} {duration}",
+                label = task.step.label(),
+                name = task.example_name,
+            );
+        }
+    }
+
+    fn print_status_lines(inner: &mut ProgressInner) {
+        if !inner.is_tty || inner.in_progress.is_empty() {
+            inner.status_lines_displayed = 0;
+            return;
+        }
+
+        let reset = "\x1b[0m";
+        let bold = "\x1b[1m";
+        let dim = "\x1b[2m";
+
+        // Build the done/in-progress/total label
+        let done_count = inner.completed.len();
+        let in_progress_count = inner.in_progress.len();
+        let range_label = format!(
+            " {}/{}/{} ",
+            done_count, in_progress_count, inner.total_examples
+        );
+
+        // Print a divider line with range label aligned with timestamps
+        let range_visible_len = range_label.len();
+        let left_divider_len = inner
+            .terminal_width
+            .saturating_sub(RIGHT_MARGIN)
+            .saturating_sub(range_visible_len);
+        let left_divider = "─".repeat(left_divider_len);
+        let right_divider = "─".repeat(RIGHT_MARGIN);
+        eprintln!("{dim}{left_divider}{reset}{range_label}{dim}{right_divider}{reset}");
+
+        let mut tasks: Vec<_> = inner.in_progress.iter().collect();
+        tasks.sort_by_key(|(name, _)| *name);
+
+        let mut lines_printed = 0;
+
+        for (name, task) in tasks.iter() {
+            let elapsed = format_duration(task.started_at.elapsed());
+            let substatus_part = task
+                .substatus
+                .as_ref()
+                .map(|s| truncate_with_ellipsis(s, 30))
+                .unwrap_or_default();
+
+            let step_label = task.step.label();
+            let step_color = task.step.color_code();
+            let name_width = inner.max_example_name_len;
+
+            let prefix = format!(
+                "{bold}{step_color}{step_label:>12}{reset} {name:<name_width$} {dim}│{reset} {substatus_part}",
+                name = name,
+            );
+
+            let duration_with_margin = format!("{elapsed} ");
+            let padding_needed = inner
+                .terminal_width
+                .saturating_sub(RIGHT_MARGIN)
+                .saturating_sub(duration_with_margin.len())
+                .saturating_sub(strip_ansi_len(&prefix));
+            let padding = " ".repeat(padding_needed);
+
+            eprintln!("{prefix}{padding}{dim}{duration_with_margin}{reset}");
+            lines_printed += 1;
+        }
+
+        inner.status_lines_displayed = lines_printed + 1; // +1 for the divider line
+        let _ = std::io::stderr().flush();
+    }
+
+    pub fn clear(&self) {
+        let mut inner = self.inner.lock().unwrap();
+        Self::clear_status_lines(&mut inner);
+    }
+}
+
+pub struct StepProgress {
+    progress: Arc<Progress>,
+    step: Step,
+    example_name: String,
+}
+
+impl StepProgress {
+    pub fn set_substatus(&self, substatus: impl Into<Cow<'static, str>>) {
+        let mut inner = self.progress.inner.lock().unwrap();
+        if let Some(task) = inner.in_progress.get_mut(&self.example_name) {
+            task.substatus = Some(substatus.into().into_owned());
+            Progress::clear_status_lines(&mut inner);
+            Progress::print_status_lines(&mut inner);
+        }
+    }
+
+    pub fn clear_substatus(&self) {
+        let mut inner = self.progress.inner.lock().unwrap();
+        if let Some(task) = inner.in_progress.get_mut(&self.example_name) {
+            task.substatus = None;
+            Progress::clear_status_lines(&mut inner);
+            Progress::print_status_lines(&mut inner);
+        }
+    }
+
+    pub fn set_info(&self, info: impl Into<String>, style: InfoStyle) {
+        let mut inner = self.progress.inner.lock().unwrap();
+        if let Some(task) = inner.in_progress.get_mut(&self.example_name) {
+            task.info = Some((info.into(), style));
+        }
+    }
+}
+
+impl Drop for StepProgress {
+    fn drop(&mut self) {
+        self.progress.finish(self.step, &self.example_name);
+    }
+}
+
+#[cfg(unix)]
+fn get_terminal_width() -> usize {
+    unsafe {
+        let mut winsize: libc::winsize = std::mem::zeroed();
+        if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) == 0
+            && winsize.ws_col > 0
+        {
+            winsize.ws_col as usize
+        } else {
+            80
+        }
+    }
+}
+
+#[cfg(not(unix))]
+fn get_terminal_width() -> usize {
+    80
+}
+
+fn strip_ansi_len(s: &str) -> usize {
+    let mut len = 0;
+    let mut in_escape = false;
+    for c in s.chars() {
+        if c == '\x1b' {
+            in_escape = true;
+        } else if in_escape {
+            if c == 'm' {
+                in_escape = false;
+            }
+        } else {
+            len += 1;
+        }
+    }
+    len
+}
+
+fn truncate_with_ellipsis(s: &str, max_len: usize) -> String {
+    if s.len() <= max_len {
+        s.to_string()
+    } else {
+        format!("{}…", &s[..max_len.saturating_sub(1)])
+    }
+}
+
+fn format_duration(duration: Duration) -> String {
+    const MINUTE_IN_MILLIS: f32 = 60. * 1000.;
+
+    let millis = duration.as_millis() as f32;
+    if millis < 1000.0 {
+        format!("{}ms", millis)
+    } else if millis < MINUTE_IN_MILLIS {
+        format!("{:.1}s", millis / 1_000.0)
+    } else {
+        format!("{:.1}m", millis / MINUTE_IN_MILLIS)
+    }
+}
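
The live status area is redrawn in place: `Progress` remembers how many lines it printed last time, erases them with cursor-up/erase-line escapes (`\x1b[A\x1b[K`), and prints the refreshed block, skipping all of this when stderr is not a TTY. A standalone sketch of that redraw technique (not the PR's exact code, which also tracks the divider line, per-task state, and ANSI-aware padding):

```rust
use std::io::{IsTerminal, Write};

// Standalone sketch: erase the previously drawn status block, then print the
// refreshed lines. Returns how many lines were drawn so the next call knows
// how far to move the cursor back up.
fn redraw_status(previously_drawn: usize, lines: &[String]) -> usize {
    if !std::io::stderr().is_terminal() {
        // Without a TTY there is nothing to erase or animate.
        return 0;
    }
    for _ in 0..previously_drawn {
        // Cursor up one line, then erase that line.
        eprint!("\x1b[A\x1b[K");
    }
    for line in lines {
        eprintln!("{line}");
    }
    let _ = std::io::stderr().flush();
    lines.len()
}
```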

crates/edit_prediction_cli/src/retrieve_context.rs

@@ -2,6 +2,7 @@ use crate::{
     example::{Example, ExampleContext},
     headless::EpAppState,
     load_project::run_load_project,
+    progress::{InfoStyle, Progress, Step, StepProgress},
 };
 use collections::HashSet;
 use edit_prediction::{DebugEvent, EditPredictionStore};
@@ -9,18 +10,22 @@ use futures::{FutureExt as _, StreamExt as _, channel::mpsc};
 use gpui::{AsyncApp, Entity};
 use language::Buffer;
 use project::Project;
-use std::{sync::Arc, time::Duration};
+use std::sync::Arc;
+use std::time::Duration;
 
 pub async fn run_context_retrieval(
     example: &mut Example,
     app_state: Arc<EpAppState>,
+    progress: Arc<Progress>,
     mut cx: AsyncApp,
 ) {
     if example.context.is_some() {
         return;
     }
 
-    run_load_project(example, app_state.clone(), cx.clone()).await;
+    run_load_project(example, app_state.clone(), progress.clone(), cx.clone()).await;
+
+    let step_progress = progress.start(Step::Context, &example.name);
 
     let state = example.state.as_ref().unwrap();
     let project = state.project.clone();
@@ -30,7 +35,7 @@ pub async fn run_context_retrieval(
             project.register_buffer_with_language_servers(&state.buffer, cx)
         })
         .unwrap();
-    wait_for_language_servers_to_start(example, &project, &state.buffer, &mut cx).await;
+    wait_for_language_servers_to_start(&project, &state.buffer, &step_progress, &mut cx).await;
 
     let ep_store = cx
         .update(|cx| EditPredictionStore::try_global(cx).unwrap())
@@ -58,19 +63,20 @@ pub async fn run_context_retrieval(
         .update(&mut cx, |store, cx| store.context_for_project(&project, cx))
         .unwrap();
 
+    let excerpt_count: usize = context_files.iter().map(|f| f.excerpts.len()).sum();
+    step_progress.set_info(format!("{} excerpts", excerpt_count), InfoStyle::Normal);
+
     example.context = Some(ExampleContext {
         files: context_files,
     });
 }
 
 async fn wait_for_language_servers_to_start(
-    example: &Example,
     project: &Entity<Project>,
     buffer: &Entity<Buffer>,
+    step_progress: &Arc<StepProgress>,
     cx: &mut AsyncApp,
 ) {
-    let log_prefix = format!("{} | ", example.name);
-
     let lsp_store = project
         .read_with(cx, |project, _| project.lsp_store())
         .unwrap();
@@ -89,11 +95,7 @@ async fn wait_for_language_servers_to_start(
         })
         .unwrap_or_default();
 
-    eprintln!(
-        "{}⏵ Waiting for {} language servers",
-        log_prefix,
-        language_server_ids.len()
-    );
+    step_progress.set_substatus(format!("waiting for {} LSPs", language_server_ids.len()));
 
     let timeout = cx
         .background_executor()
@@ -102,10 +104,10 @@ async fn wait_for_language_servers_to_start(
 
     let (mut tx, mut rx) = mpsc::channel(language_server_ids.len());
     let added_subscription = cx.subscribe(project, {
-        let log_prefix = log_prefix.clone();
+        let step_progress = step_progress.clone();
         move |_, event, _| match event {
             project::Event::LanguageServerAdded(language_server_id, name, _) => {
-                eprintln!("{}+ Language server started: {}", log_prefix, name);
+                step_progress.set_substatus(format!("LSP started: {}", name));
                 tx.try_send(*language_server_id).ok();
             }
             _ => {}
@@ -137,7 +139,7 @@ async fn wait_for_language_servers_to_start(
     let (mut tx, mut rx) = mpsc::channel(language_server_ids.len());
     let subscriptions = [
         cx.subscribe(&lsp_store, {
-            let log_prefix = log_prefix.clone();
+            let step_progress = step_progress.clone();
             move |_, event, _| {
                 if let project::LspStoreEvent::LanguageServerUpdate {
                     message:
@@ -150,12 +152,12 @@ async fn wait_for_language_servers_to_start(
                     ..
                 } = event
                 {
-                    eprintln!("{}⟲ {message}", log_prefix)
+                    step_progress.set_substatus(message.clone());
                 }
             }
         }),
         cx.subscribe(project, {
-            let log_prefix = log_prefix.clone();
+            let step_progress = step_progress.clone();
             move |_, event, cx| match event {
                 project::Event::DiskBasedDiagnosticsFinished { language_server_id } => {
                     let lsp_store = lsp_store.read(cx);
@@ -163,7 +165,7 @@ async fn wait_for_language_servers_to_start(
                         .language_server_adapter_for_id(*language_server_id)
                         .unwrap()
                         .name();
-                    eprintln!("{}⚑ Language server idle: {}", log_prefix, name);
+                    step_progress.set_substatus(format!("LSP idle: {}", name));
                     tx.try_send(*language_server_id).ok();
                 }
                 _ => {}
@@ -192,4 +194,5 @@ async fn wait_for_language_servers_to_start(
     }
 
     drop(subscriptions);
+    step_progress.clear_substatus();
 }
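
`wait_for_language_servers_to_start` now streams LSP lifecycle events into the substatus line by cloning the `Arc<StepProgress>` into each gpui subscription, which is why the guard is handed out as an `Arc` in the first place. A reduced, hypothetical sketch of that capture pattern without the gpui event types:

```rust
use std::sync::Arc;

use crate::progress::{Progress, Step};

// Hypothetical sketch: the step guard is an `Arc`, so it can be cloned into
// long-lived callbacks (like the subscriptions above) that stream status
// text into the example's substatus line.
fn context_step(example_name: &str, progress: Arc<Progress>) -> impl FnMut(String) {
    let step = progress.start(Step::Context, example_name);
    move |lsp_message: String| {
        // Each event rewrites the single in-place substatus line.
        step.set_substatus(lsp_message);
    }
}
```

Dropping the returned closure drops the captured guard, which is what marks the `Context` step as finished.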

crates/edit_prediction_cli/src/score.rs

@@ -4,6 +4,7 @@ use crate::{
     headless::EpAppState,
     metrics::{self, ClassificationMetrics},
     predict::run_prediction,
+    progress::{Progress, Step},
 };
 use edit_prediction::udiff::DiffLine;
 use gpui::AsyncApp;
@@ -13,6 +14,7 @@ pub async fn run_scoring(
     example: &mut Example,
     args: &PredictArgs,
     app_state: Arc<EpAppState>,
+    progress: Arc<Progress>,
     cx: AsyncApp,
 ) {
     run_prediction(
@@ -20,10 +22,13 @@ pub async fn run_scoring(
         Some(args.provider),
         args.repetitions,
         app_state,
+        progress.clone(),
         cx,
     )
     .await;
 
+    let _progress = progress.start(Step::Score, &example.name);
+
     let expected_patch = parse_patch(&example.expected_patch);
 
     let mut scores = vec![];