// llm.rs

 1use serde::{Deserialize, Serialize};
 2use strum::{Display, EnumIter, EnumString};
 3
 4pub const EXPIRED_LLM_TOKEN_HEADER_NAME: &str = "x-zed-expired-token";
 5
 6pub const MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME: &str = "x-zed-llm-max-monthly-spend-reached";
 7
 8#[derive(
 9    Debug, PartialEq, Eq, Hash, Clone, Copy, Serialize, Deserialize, EnumString, EnumIter, Display,
10)]
11#[serde(rename_all = "snake_case")]
12#[strum(serialize_all = "snake_case")]
13pub enum LanguageModelProvider {
14    Anthropic,
15    OpenAi,
16    Google,
17}
18
19#[derive(Debug, Serialize, Deserialize)]
20pub struct LanguageModel {
21    pub provider: LanguageModelProvider,
22    pub name: String,
23}
24
25#[derive(Debug, Serialize, Deserialize)]
26pub struct ListModelsResponse {
27    pub models: Vec<LanguageModel>,
28}
29
30#[derive(Debug, Serialize, Deserialize)]
31pub struct PerformCompletionParams {
32    pub provider: LanguageModelProvider,
33    pub model: String,
34    pub provider_request: Box<serde_json::value::RawValue>,
35}
36
37#[derive(Debug, Serialize, Deserialize)]
38pub struct PredictEditsParams {
39    pub outline: Option<String>,
40    pub input_events: String,
41    pub input_excerpt: String,
42    /// Whether the user provided consent for sampling this interaction.
43    #[serde(default)]
44    pub can_collect_data: bool,
45}
46
47#[derive(Debug, Serialize, Deserialize)]
48pub struct PredictEditsResponse {
49    pub output_excerpt: String,
50}