Detailed changes
@@ -3,10 +3,9 @@ use arrayvec::ArrayVec;
use client::{Client, EditPredictionUsage, UserStore};
use cloud_llm_client::predict_edits_v3::{self, PromptFormat};
use cloud_llm_client::{
- AcceptEditPredictionBody, EXPIRED_LLM_TOKEN_HEADER_NAME, EditPredictionRejectReason,
- EditPredictionRejection, MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST,
- MINIMUM_REQUIRED_VERSION_HEADER_NAME, PredictEditsRequestTrigger, RejectEditPredictionsBodyRef,
- ZED_VERSION_HEADER_NAME,
+ EXPIRED_LLM_TOKEN_HEADER_NAME, EditPredictionRejectReason, EditPredictionRejection,
+ MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST, MINIMUM_REQUIRED_VERSION_HEADER_NAME,
+ PredictEditsRequestTrigger, RejectEditPredictionsBodyRef, ZED_VERSION_HEADER_NAME,
};
use collections::{HashMap, HashSet};
use db::kvp::{Dismissable, KEY_VALUE_STORE};
@@ -331,6 +330,7 @@ struct CurrentEditPrediction {
pub requested_by: PredictionRequestedBy,
pub prediction: EditPrediction,
pub was_shown: bool,
+ pub shown_with: Option<edit_prediction_types::SuggestionDisplayType>,
}
impl CurrentEditPrediction {
@@ -1098,65 +1098,27 @@ impl EditPredictionStore {
}
fn accept_current_prediction(&mut self, project: &Entity<Project>, cx: &mut Context<Self>) {
- let custom_accept_url = env::var("ZED_ACCEPT_PREDICTION_URL").ok();
- match self.edit_prediction_model {
- EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => {
- if self.custom_predict_edits_url.is_some() && custom_accept_url.is_none() {
- return;
- }
- }
- EditPredictionModel::Sweep | EditPredictionModel::Mercury => return,
- }
-
let Some(project_state) = self.projects.get_mut(&project.entity_id()) else {
return;
};
- let Some(prediction) = project_state.current_prediction.take() else {
+ let Some(current_prediction) = project_state.current_prediction.take() else {
return;
};
- let request_id = prediction.prediction.id.to_string();
+
for pending_prediction in mem::take(&mut project_state.pending_predictions) {
project_state.cancel_pending_prediction(pending_prediction, cx);
}
- let client = self.client.clone();
- let llm_token = self.llm_token.clone();
- let app_version = AppVersion::global(cx);
- cx.spawn(async move |this, cx| {
- let (url, require_auth) = if let Some(accept_edits_url) = custom_accept_url {
- (http_client::Url::parse(&accept_edits_url)?, false)
- } else {
- (
- client
- .http_client()
- .build_zed_llm_url("/predict_edits/accept", &[])?,
- true,
- )
- };
-
- let response = cx
- .background_spawn(Self::send_api_request::<()>(
- move |builder| {
- let req = builder.uri(url.as_ref()).body(
- serde_json::to_string(&AcceptEditPredictionBody {
- request_id: request_id.clone(),
- })?
- .into(),
- );
- Ok(req?)
- },
- client,
- llm_token,
- app_version,
- require_auth,
- ))
- .await;
-
- Self::handle_api_response(&this, response, cx)?;
- anyhow::Ok(())
- })
- .detach_and_log_err(cx);
+ match self.edit_prediction_model {
+ EditPredictionModel::Sweep => {
+ sweep_ai::edit_prediction_accepted(self, current_prediction, cx)
+ }
+ EditPredictionModel::Mercury => {}
+ EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => {
+ zeta2::edit_prediction_accepted(self, current_prediction, cx)
+ }
+ }
}
async fn handle_rejected_predictions(
@@ -1231,18 +1193,51 @@ impl EditPredictionStore {
};
}
- fn did_show_current_prediction(&mut self, project: &Entity<Project>, _cx: &mut Context<Self>) {
- if let Some(project_state) = self.projects.get_mut(&project.entity_id()) {
- if let Some(current_prediction) = project_state.current_prediction.as_mut() {
- if !current_prediction.was_shown {
- current_prediction.was_shown = true;
- self.shown_predictions
- .push_front(current_prediction.prediction.clone());
- if self.shown_predictions.len() > 50 {
- let completion = self.shown_predictions.pop_back().unwrap();
- self.rated_predictions.remove(&completion.id);
- }
- }
+ fn did_show_current_prediction(
+ &mut self,
+ project: &Entity<Project>,
+ display_type: edit_prediction_types::SuggestionDisplayType,
+ cx: &mut Context<Self>,
+ ) {
+ let Some(project_state) = self.projects.get_mut(&project.entity_id()) else {
+ return;
+ };
+
+ let Some(current_prediction) = project_state.current_prediction.as_mut() else {
+ return;
+ };
+
+ let is_jump = display_type == edit_prediction_types::SuggestionDisplayType::Jump;
+ let previous_shown_with = current_prediction.shown_with;
+
+ if previous_shown_with.is_none() || !is_jump {
+ current_prediction.shown_with = Some(display_type);
+ }
+
+ let is_first_non_jump_show = !current_prediction.was_shown && !is_jump;
+
+ if is_first_non_jump_show {
+ current_prediction.was_shown = true;
+ }
+
+ let display_type_changed = previous_shown_with != Some(display_type);
+
+ if self.edit_prediction_model == EditPredictionModel::Sweep && display_type_changed {
+ sweep_ai::edit_prediction_shown(
+ &self.sweep_ai,
+ self.client.clone(),
+ &current_prediction.prediction,
+ display_type,
+ cx,
+ );
+ }
+
+ if is_first_non_jump_show {
+ self.shown_predictions
+ .push_front(current_prediction.prediction.clone());
+ if self.shown_predictions.len() > 50 {
+ let completion = self.shown_predictions.pop_back().unwrap();
+ self.rated_predictions.remove(&completion.id);
}
}
}
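
For reference, a minimal standalone sketch (hypothetical, not part of the change) of the show-tracking rule implemented above: a `Jump` display never overwrites an already-recorded `shown_with` value, and only the first non-jump show marks the prediction as shown for rating purposes.

```rust
// Illustrative sketch of the rule above, using a local stand-in enum.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Display {
    GhostText,
    DiffPopover,
    Jump,
}

fn record_show(shown_with: &mut Option<Display>, was_shown: &mut bool, display: Display) {
    let is_jump = display == Display::Jump;
    // A jump-to-edit indicator only fills in `shown_with` when nothing was recorded yet.
    if shown_with.is_none() || !is_jump {
        *shown_with = Some(display);
    }
    // Only the first non-jump show counts toward the shown/rated history.
    if !*was_shown && !is_jump {
        *was_shown = true;
    }
}

fn main() {
    let (mut shown_with, mut was_shown) = (None, false);
    record_show(&mut shown_with, &mut was_shown, Display::Jump);
    assert_eq!((shown_with, was_shown), (Some(Display::Jump), false));
    record_show(&mut shown_with, &mut was_shown, Display::GhostText);
    assert_eq!((shown_with, was_shown), (Some(Display::GhostText), true));
}
```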
@@ -1503,6 +1498,7 @@ impl EditPredictionStore {
requested_by,
prediction,
was_shown: false,
+ shown_with: None,
};
if let Some(current_prediction) =
@@ -1,23 +1,30 @@
use anyhow::Result;
+use client::Client;
use futures::AsyncReadExt as _;
use gpui::{
App, AppContext as _, Entity, Global, SharedString, Task,
http_client::{self, AsyncBody, Method},
};
-use language::{Point, ToOffset as _};
+use language::{Anchor, BufferSnapshot, Point, ToOffset as _};
use language_model::{ApiKeyState, EnvVar, env_var};
use lsp::DiagnosticSeverity;
use serde::{Deserialize, Serialize};
use std::{
fmt::{self, Write as _},
+ ops::Range,
path::Path,
sync::Arc,
time::Instant,
};
-use crate::{EditPredictionId, EditPredictionModelInput, prediction::EditPredictionResult};
+use crate::{
+ CurrentEditPrediction, EditPrediction, EditPredictionId, EditPredictionModelInput,
+ EditPredictionStore, prediction::EditPredictionResult,
+};
+use edit_prediction_types::SuggestionDisplayType;
const SWEEP_API_URL: &str = "https://autocomplete.sweep.dev/backend/next_edit_autocomplete";
+const SWEEP_METRICS_URL: &str = "https://backend.app.sweep.dev/backend/track_autocomplete_metrics";
pub struct SweepAi {
pub api_token: Entity<ApiKeyState>,
@@ -404,3 +411,174 @@ fn debug_info(cx: &gpui::App) -> Arc<str> {
)
.into()
}
+
+#[derive(Debug, Clone, Copy, Serialize)]
+#[serde(rename_all = "snake_case")]
+pub enum SweepEventType {
+ AutocompleteSuggestionShown,
+ AutocompleteSuggestionAccepted,
+}
+
+#[derive(Debug, Clone, Copy, Serialize)]
+#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
+pub enum SweepSuggestionType {
+ GhostText,
+ Popup,
+ JumpToEdit,
+}
+
+#[derive(Debug, Clone, Serialize)]
+struct AutocompleteMetricsRequest {
+ event_type: SweepEventType,
+ suggestion_type: SweepSuggestionType,
+ additions: u32,
+ deletions: u32,
+ autocomplete_id: String,
+ edit_tracking: String,
+ edit_tracking_line: Option<u32>,
+ lifespan: Option<u64>,
+ debug_info: Arc<str>,
+ device_id: String,
+ privacy_mode_enabled: bool,
+}
+
+fn send_autocomplete_metrics_request(
+ cx: &App,
+ client: Arc<Client>,
+ api_token: Arc<str>,
+ request_body: AutocompleteMetricsRequest,
+) {
+ let http_client = client.http_client();
+ cx.background_spawn(async move {
+ let body: AsyncBody = serde_json::to_string(&request_body)?.into();
+
+ let request = http_client::Request::builder()
+ .uri(SWEEP_METRICS_URL)
+ .header("Content-Type", "application/json")
+ .header("Authorization", format!("Bearer {}", api_token))
+ .method(Method::POST)
+ .body(body)?;
+
+ let mut response = http_client.send(request).await?;
+
+ if !response.status().is_success() {
+ let mut body = String::new();
+ response.body_mut().read_to_string(&mut body).await?;
+ anyhow::bail!(
+ "Failed to send autocomplete metrics for sweep_ai: {:?}\nBody: {}",
+ response.status(),
+ body,
+ );
+ }
+
+ Ok(())
+ })
+ .detach_and_log_err(cx);
+}
+
+pub(crate) fn edit_prediction_accepted(
+ store: &EditPredictionStore,
+ current_prediction: CurrentEditPrediction,
+ cx: &App,
+) {
+ let Some(api_token) = store
+ .sweep_ai
+ .api_token
+ .read(cx)
+ .key(&SWEEP_CREDENTIALS_URL)
+ else {
+ return;
+ };
+ let debug_info = store.sweep_ai.debug_info.clone();
+
+ let prediction = current_prediction.prediction;
+
+ let (additions, deletions) = compute_edit_metrics(&prediction.edits, &prediction.snapshot);
+ let autocomplete_id = prediction.id.to_string();
+
+ let device_id = store
+ .client
+ .user_id()
+ .as_ref()
+ .map(ToString::to_string)
+ .unwrap_or_default();
+
+ let suggestion_type = match current_prediction.shown_with {
+ Some(SuggestionDisplayType::DiffPopover) => SweepSuggestionType::Popup,
+ Some(SuggestionDisplayType::Jump) => return, // shouldn't happen
+ Some(SuggestionDisplayType::GhostText) | None => SweepSuggestionType::GhostText,
+ };
+
+ let request_body = AutocompleteMetricsRequest {
+ event_type: SweepEventType::AutocompleteSuggestionAccepted,
+ suggestion_type,
+ additions,
+ deletions,
+ autocomplete_id,
+ edit_tracking: String::new(),
+ edit_tracking_line: None,
+ lifespan: None,
+ debug_info,
+ device_id,
+ privacy_mode_enabled: false,
+ };
+
+ send_autocomplete_metrics_request(cx, store.client.clone(), api_token, request_body);
+}
+
+pub fn edit_prediction_shown(
+ sweep_ai: &SweepAi,
+ client: Arc<Client>,
+ prediction: &EditPrediction,
+ display_type: SuggestionDisplayType,
+ cx: &App,
+) {
+ let Some(api_token) = sweep_ai.api_token.read(cx).key(&SWEEP_CREDENTIALS_URL) else {
+ return;
+ };
+ let debug_info = sweep_ai.debug_info.clone();
+
+ let (additions, deletions) = compute_edit_metrics(&prediction.edits, &prediction.snapshot);
+ let autocomplete_id = prediction.id.to_string();
+
+ let suggestion_type = match display_type {
+ SuggestionDisplayType::GhostText => SweepSuggestionType::GhostText,
+ SuggestionDisplayType::DiffPopover => SweepSuggestionType::Popup,
+ SuggestionDisplayType::Jump => SweepSuggestionType::JumpToEdit,
+ };
+
+ let request_body = AutocompleteMetricsRequest {
+ event_type: SweepEventType::AutocompleteSuggestionShown,
+ suggestion_type,
+ additions,
+ deletions,
+ autocomplete_id,
+ edit_tracking: String::new(),
+ edit_tracking_line: None,
+ lifespan: None,
+ debug_info,
+ device_id: String::new(),
+ privacy_mode_enabled: false,
+ };
+
+ send_autocomplete_metrics_request(cx, client, api_token, request_body);
+}
+
+fn compute_edit_metrics(
+ edits: &[(Range<Anchor>, Arc<str>)],
+ snapshot: &BufferSnapshot,
+) -> (u32, u32) {
+ let mut additions = 0u32;
+ let mut deletions = 0u32;
+
+ for (range, new_text) in edits {
+ let old_text = snapshot.text_for_range(range.clone());
+ deletions += old_text
+ .map(|chunk| chunk.lines().count())
+ .sum::<usize>()
+ .max(1) as u32;
+ additions += new_text.lines().count().max(1) as u32;
+ }
+
+ (additions, deletions)
+}
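
A side note on `compute_edit_metrics`: both counters are floored at 1 per edit, so a pure insertion still reports one deletion and a pure deletion one addition. A minimal standalone sketch of the same counting rule over plain strings (hypothetical helper, not part of the change):

```rust
// Mirrors the per-edit counting rule in `compute_edit_metrics`, but over plain
// strings instead of buffer anchors: line counts of new and old text, each
// floored at 1.
fn count_lines(old_text: &str, new_text: &str) -> (u32, u32) {
    let additions = new_text.lines().count().max(1) as u32;
    let deletions = old_text.lines().count().max(1) as u32;
    (additions, deletions)
}

fn main() {
    // Replacing one line with three counts as 3 additions, 1 deletion.
    assert_eq!(
        count_lines("let x = 1;", "let x = 1;\nlet y = 2;\nlet z = 3;"),
        (3, 1)
    );
    // A pure insertion still registers one deletion because of the `.max(1)` floor.
    assert_eq!(count_lines("", "let y = 2;"), (1, 1));
}
```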
@@ -2,7 +2,7 @@ use std::{cmp, sync::Arc};
use client::{Client, UserStore};
use cloud_llm_client::EditPredictionRejectReason;
-use edit_prediction_types::{DataCollectionState, EditPredictionDelegate};
+use edit_prediction_types::{DataCollectionState, EditPredictionDelegate, SuggestionDisplayType};
use gpui::{App, Entity, prelude::*};
use language::{Buffer, ToPoint as _};
use project::Project;
@@ -151,9 +151,9 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
});
}
- fn did_show(&mut self, cx: &mut Context<Self>) {
+ fn did_show(&mut self, display_type: SuggestionDisplayType, cx: &mut Context<Self>) {
self.store.update(cx, |store, cx| {
- store.did_show_current_prediction(&self.project, cx);
+ store.did_show_current_prediction(&self.project, display_type, cx);
});
}
@@ -3,14 +3,17 @@ use crate::EvalCacheEntryKind;
use crate::open_ai_response::text_from_response;
use crate::prediction::EditPredictionResult;
use crate::{
- DebugEvent, EDIT_PREDICTIONS_MODEL_ID, EditPredictionFinishedDebugEvent, EditPredictionId,
- EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore,
+ CurrentEditPrediction, DebugEvent, EDIT_PREDICTIONS_MODEL_ID, EditPredictionFinishedDebugEvent,
+ EditPredictionId, EditPredictionModelInput, EditPredictionStartedDebugEvent,
+ EditPredictionStore,
};
use anyhow::{Result, anyhow};
-use cloud_llm_client::EditPredictionRejectReason;
-use gpui::{Task, prelude::*};
+use cloud_llm_client::{AcceptEditPredictionBody, EditPredictionRejectReason};
+use gpui::{App, Task, prelude::*};
use language::{OffsetRangeExt as _, ToOffset as _, ToPoint};
use release_channel::AppVersion;
+
+use std::env;
use std::{path::Path, sync::Arc, time::Instant};
use zeta_prompt::CURSOR_MARKER;
use zeta_prompt::format_zeta_prompt;
@@ -227,6 +230,54 @@ pub fn zeta2_prompt_input(
(editable_offset_range, prompt_input)
}
+pub(crate) fn edit_prediction_accepted(
+ store: &EditPredictionStore,
+ current_prediction: CurrentEditPrediction,
+ cx: &App,
+) {
+ let custom_accept_url = env::var("ZED_ACCEPT_PREDICTION_URL").ok();
+ if store.custom_predict_edits_url.is_some() && custom_accept_url.is_none() {
+ return;
+ }
+
+ let request_id = current_prediction.prediction.id.to_string();
+ let require_auth = custom_accept_url.is_none();
+ let client = store.client.clone();
+ let llm_token = store.llm_token.clone();
+ let app_version = AppVersion::global(cx);
+
+ cx.background_spawn(async move {
+ let url = if let Some(accept_edits_url) = custom_accept_url {
+ gpui::http_client::Url::parse(&accept_edits_url)?
+ } else {
+ client
+ .http_client()
+ .build_zed_llm_url("/predict_edits/accept", &[])?
+ };
+
+ let response = EditPredictionStore::send_api_request::<()>(
+ move |builder| {
+ let req = builder.uri(url.as_ref()).body(
+ serde_json::to_string(&AcceptEditPredictionBody {
+ request_id: request_id.clone(),
+ })?
+ .into(),
+ );
+ Ok(req?)
+ },
+ client,
+ llm_token,
+ app_version,
+ require_auth,
+ )
+ .await;
+
+ response?;
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+}
+
#[cfg(feature = "cli-support")]
pub fn zeta2_output_for_patch(input: &zeta_prompt::ZetaPromptInput, patch: &str) -> Result<String> {
let text = &input.cursor_excerpt;
@@ -4,6 +4,15 @@ use client::EditPredictionUsage;
use gpui::{App, Context, Entity, SharedString};
use language::{Anchor, Buffer, OffsetRangeExt};
+/// The display mode used when showing an edit prediction to the user.
+/// Used for metrics tracking.
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum SuggestionDisplayType {
+ GhostText,
+ DiffPopover,
+ Jump,
+}
+
// TODO: Find a better home for `Direction`.
//
// This should live in an ancestor crate of `editor` and `edit_prediction`,
@@ -97,7 +106,7 @@ pub trait EditPredictionDelegate: 'static + Sized {
);
fn accept(&mut self, cx: &mut Context<Self>);
fn discard(&mut self, cx: &mut Context<Self>);
- fn did_show(&mut self, _cx: &mut Context<Self>) {}
+ fn did_show(&mut self, _display_type: SuggestionDisplayType, _cx: &mut Context<Self>) {}
fn suggest(
&mut self,
buffer: &Entity<Buffer>,
@@ -129,7 +138,7 @@ pub trait EditPredictionDelegateHandle {
debounce: bool,
cx: &mut App,
);
- fn did_show(&self, cx: &mut App);
+ fn did_show(&self, display_type: SuggestionDisplayType, cx: &mut App);
fn accept(&self, cx: &mut App);
fn discard(&self, cx: &mut App);
fn suggest(
@@ -209,8 +218,8 @@ where
self.update(cx, |this, cx| this.discard(cx))
}
- fn did_show(&self, cx: &mut App) {
- self.update(cx, |this, cx| this.did_show(cx))
+ fn did_show(&self, display_type: SuggestionDisplayType, cx: &mut App) {
+ self.update(cx, |this, cx| this.did_show(display_type, cx))
}
fn suggest(
@@ -91,6 +91,7 @@ use dap::TelemetrySpawnLocation;
use display_map::*;
use edit_prediction_types::{
EditPredictionDelegate, EditPredictionDelegateHandle, EditPredictionGranularity,
+ SuggestionDisplayType,
};
use editor_settings::{GoToDefinitionFallback, Minimap as MinimapSettings};
use element::{AcceptEditPredictionBinding, LineWithInvisibles, PositionMap, layout_line};
@@ -8077,10 +8078,6 @@ impl Editor {
self.edit_prediction_preview,
EditPredictionPreview::Inactive { .. }
) {
- if let Some(provider) = self.edit_prediction_provider.as_ref() {
- provider.provider.did_show(cx)
- }
-
self.edit_prediction_preview = EditPredictionPreview::Active {
previous_scroll_position: None,
since: Instant::now(),
@@ -8203,6 +8200,9 @@ impl Editor {
snapshot,
target,
} => {
+ if let Some(provider) = &self.edit_prediction_provider {
+ provider.provider.did_show(SuggestionDisplayType::Jump, cx);
+ }
self.stale_edit_prediction_in_menu = None;
self.active_edit_prediction = Some(EditPredictionState {
inlay_ids: vec![],
@@ -8258,6 +8258,9 @@ impl Editor {
let is_move = supports_jump
&& (move_invalidation_row_range.is_some() || self.edit_predictions_hidden_for_vim_mode);
let completion = if is_move {
+ if let Some(provider) = &self.edit_prediction_provider {
+ provider.provider.did_show(SuggestionDisplayType::Jump, cx);
+ }
invalidation_row_range =
move_invalidation_row_range.unwrap_or(edit_start_row..edit_end_row);
let target = first_edit_start;
@@ -8266,9 +8269,25 @@ impl Editor {
let show_completions_in_buffer = !self.edit_prediction_visible_in_cursor_popover(true)
&& !self.edit_predictions_hidden_for_vim_mode;
+ let display_mode = if all_edits_insertions_or_deletions(&edits, &multibuffer) {
+ if provider.show_tab_accept_marker() {
+ EditDisplayMode::TabAccept
+ } else {
+ EditDisplayMode::Inline
+ }
+ } else {
+ EditDisplayMode::DiffPopover
+ };
+
if show_completions_in_buffer {
if let Some(provider) = &self.edit_prediction_provider {
- provider.provider.did_show(cx);
+ let suggestion_display_type = match display_mode {
+ EditDisplayMode::DiffPopover => SuggestionDisplayType::DiffPopover,
+ EditDisplayMode::Inline | EditDisplayMode::TabAccept => {
+ SuggestionDisplayType::GhostText
+ }
+ };
+ provider.provider.did_show(suggestion_display_type, cx);
}
if edits
.iter()
@@ -8301,16 +8320,6 @@ impl Editor {
invalidation_row_range = edit_start_row..edit_end_row;
- let display_mode = if all_edits_insertions_or_deletions(&edits, &multibuffer) {
- if provider.show_tab_accept_marker() {
- EditDisplayMode::TabAccept
- } else {
- EditDisplayMode::Inline
- }
- } else {
- EditDisplayMode::DiffPopover
- };
-
EditPrediction::Edit {
edits,
edit_preview,