open_ai.rs

use anyhow::{Context as _, Result, anyhow};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use http_client::{
    AsyncBody, HttpClient, Method, Request as HttpRequest, StatusCode,
    http::{HeaderMap, HeaderValue},
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
pub use settings::OpenAiReasoningEffort as ReasoningEffort;
use std::{convert::TryFrom, future::Future};
use strum::EnumIter;
use thiserror::Error;

pub const OPEN_AI_API_URL: &str = "https://api.openai.com/v1";

fn is_none_or_empty<T: AsRef<[U]>, U>(opt: &Option<T>) -> bool {
    opt.as_ref().is_none_or(|v| v.as_ref().is_empty())
}

#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
    System,
    Tool,
}

impl TryFrom<String> for Role {
    type Error = anyhow::Error;

    fn try_from(value: String) -> Result<Self> {
        match value.as_str() {
            "user" => Ok(Self::User),
            "assistant" => Ok(Self::Assistant),
            "system" => Ok(Self::System),
            "tool" => Ok(Self::Tool),
            _ => anyhow::bail!("invalid role '{value}'"),
        }
    }
}

impl From<Role> for String {
    fn from(val: Role) -> Self {
        match val {
            Role::User => "user".to_owned(),
            Role::Assistant => "assistant".to_owned(),
            Role::System => "system".to_owned(),
            Role::Tool => "tool".to_owned(),
        }
    }
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
    #[serde(rename = "gpt-3.5-turbo")]
    ThreePointFiveTurbo,
    #[serde(rename = "gpt-4")]
    Four,
    #[serde(rename = "gpt-4-turbo")]
    FourTurbo,
    #[serde(rename = "gpt-4o")]
    #[default]
    FourOmni,
    #[serde(rename = "gpt-4o-mini")]
    FourOmniMini,
    #[serde(rename = "gpt-4.1")]
    FourPointOne,
    #[serde(rename = "gpt-4.1-mini")]
    FourPointOneMini,
    #[serde(rename = "gpt-4.1-nano")]
    FourPointOneNano,
    #[serde(rename = "o1")]
    O1,
    #[serde(rename = "o3-mini")]
    O3Mini,
    #[serde(rename = "o3")]
    O3,
    #[serde(rename = "o4-mini")]
    O4Mini,
    #[serde(rename = "gpt-5")]
    Five,
    #[serde(rename = "gpt-5-mini")]
    FiveMini,
    #[serde(rename = "gpt-5-nano")]
    FiveNano,
    #[serde(rename = "gpt-5.1")]
    FivePointOne,
    #[serde(rename = "custom")]
    Custom {
        name: String,
        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
        display_name: Option<String>,
        max_tokens: u64,
        max_output_tokens: Option<u64>,
        max_completion_tokens: Option<u64>,
        reasoning_effort: Option<ReasoningEffort>,
    },
}

impl Model {
    pub fn default_fast() -> Self {
        // TODO: Replace with FiveMini since all other models are deprecated
        Self::FourPointOneMini
    }

    pub fn from_id(id: &str) -> Result<Self> {
        match id {
            "gpt-3.5-turbo" => Ok(Self::ThreePointFiveTurbo),
            "gpt-4" => Ok(Self::Four),
            "gpt-4-turbo" | "gpt-4-turbo-preview" => Ok(Self::FourTurbo),
            "gpt-4o" => Ok(Self::FourOmni),
            "gpt-4o-mini" => Ok(Self::FourOmniMini),
            "gpt-4.1" => Ok(Self::FourPointOne),
            "gpt-4.1-mini" => Ok(Self::FourPointOneMini),
            "gpt-4.1-nano" => Ok(Self::FourPointOneNano),
            "o1" => Ok(Self::O1),
            "o3-mini" => Ok(Self::O3Mini),
            "o3" => Ok(Self::O3),
            "o4-mini" => Ok(Self::O4Mini),
            "gpt-5" => Ok(Self::Five),
            "gpt-5-mini" => Ok(Self::FiveMini),
            "gpt-5-nano" => Ok(Self::FiveNano),
            "gpt-5.1" => Ok(Self::FivePointOne),
            invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
        }
    }

    pub fn id(&self) -> &str {
        match self {
            Self::ThreePointFiveTurbo => "gpt-3.5-turbo",
            Self::Four => "gpt-4",
            Self::FourTurbo => "gpt-4-turbo",
            Self::FourOmni => "gpt-4o",
            Self::FourOmniMini => "gpt-4o-mini",
            Self::FourPointOne => "gpt-4.1",
            Self::FourPointOneMini => "gpt-4.1-mini",
            Self::FourPointOneNano => "gpt-4.1-nano",
            Self::O1 => "o1",
            Self::O3Mini => "o3-mini",
            Self::O3 => "o3",
            Self::O4Mini => "o4-mini",
            Self::Five => "gpt-5",
            Self::FiveMini => "gpt-5-mini",
            Self::FiveNano => "gpt-5-nano",
            Self::FivePointOne => "gpt-5.1",
            Self::Custom { name, .. } => name,
        }
    }

    pub fn display_name(&self) -> &str {
        match self {
            Self::ThreePointFiveTurbo => "gpt-3.5-turbo",
            Self::Four => "gpt-4",
            Self::FourTurbo => "gpt-4-turbo",
            Self::FourOmni => "gpt-4o",
            Self::FourOmniMini => "gpt-4o-mini",
            Self::FourPointOne => "gpt-4.1",
            Self::FourPointOneMini => "gpt-4.1-mini",
            Self::FourPointOneNano => "gpt-4.1-nano",
            Self::O1 => "o1",
            Self::O3Mini => "o3-mini",
            Self::O3 => "o3",
            Self::O4Mini => "o4-mini",
            Self::Five => "gpt-5",
            Self::FiveMini => "gpt-5-mini",
            Self::FiveNano => "gpt-5-nano",
            Self::FivePointOne => "gpt-5.1",
            Self::Custom {
                name, display_name, ..
            } => display_name.as_ref().unwrap_or(name),
        }
    }

    pub fn max_token_count(&self) -> u64 {
        match self {
            Self::ThreePointFiveTurbo => 16_385,
            Self::Four => 8_192,
            Self::FourTurbo => 128_000,
            Self::FourOmni => 128_000,
            Self::FourOmniMini => 128_000,
            Self::FourPointOne => 1_047_576,
            Self::FourPointOneMini => 1_047_576,
            Self::FourPointOneNano => 1_047_576,
            Self::O1 => 200_000,
            Self::O3Mini => 200_000,
            Self::O3 => 200_000,
            Self::O4Mini => 200_000,
            Self::Five => 272_000,
            Self::FiveMini => 272_000,
            Self::FiveNano => 272_000,
            Self::FivePointOne => 400_000,
            Self::Custom { max_tokens, .. } => *max_tokens,
        }
    }

    pub fn max_output_tokens(&self) -> Option<u64> {
        match self {
            Self::Custom {
                max_output_tokens, ..
            } => *max_output_tokens,
            Self::ThreePointFiveTurbo => Some(4_096),
            Self::Four => Some(8_192),
            Self::FourTurbo => Some(4_096),
            Self::FourOmni => Some(16_384),
            Self::FourOmniMini => Some(16_384),
            Self::FourPointOne => Some(32_768),
            Self::FourPointOneMini => Some(32_768),
            Self::FourPointOneNano => Some(32_768),
            Self::O1 => Some(100_000),
            Self::O3Mini => Some(100_000),
            Self::O3 => Some(100_000),
            Self::O4Mini => Some(100_000),
            Self::Five => Some(128_000),
            Self::FiveMini => Some(128_000),
            Self::FiveNano => Some(128_000),
            Self::FivePointOne => Some(128_000),
        }
    }

    pub fn reasoning_effort(&self) -> Option<ReasoningEffort> {
        match self {
            Self::Custom {
                reasoning_effort, ..
            } => reasoning_effort.to_owned(),
            _ => None,
        }
    }

    /// Returns whether the given model supports the `parallel_tool_calls` parameter.
    ///
    /// If the model does not support the parameter, omit it from the request, or the API
    /// will return an error (see the sketch after the `Request` struct below).
    pub fn supports_parallel_tool_calls(&self) -> bool {
        match self {
            Self::ThreePointFiveTurbo
            | Self::Four
            | Self::FourTurbo
            | Self::FourOmni
            | Self::FourOmniMini
            | Self::FourPointOne
            | Self::FourPointOneMini
            | Self::FourPointOneNano
            | Self::Five
            | Self::FiveMini
            | Self::FivePointOne
            | Self::FiveNano => true,
            Self::O1 | Self::O3 | Self::O3Mini | Self::O4Mini | Self::Custom { .. } => false,
        }
    }

    /// Returns whether the given model supports the `prompt_cache_key` parameter.
    ///
    /// If the model does not support the parameter, omit it from the request.
    pub fn supports_prompt_cache_key(&self) -> bool {
        true
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
    pub model: String,
    pub messages: Vec<RequestMessage>,
    pub stream: bool,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_completion_tokens: Option<u64>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub stop: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    /// Whether to enable parallel function calling during tool use.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub parallel_tool_calls: Option<bool>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<ToolDefinition>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub prompt_cache_key: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning_effort: Option<ReasoningEffort>,
}
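
// Illustrative sketch (an addition, not part of the upstream API): one way the
// capability helpers on `Model` can gate the optional request fields above. The
// helper name, the `stream: true` choice, and the placeholder cache key are
// assumptions made for this example only.
#[allow(dead_code)]
fn example_request(model: &Model, messages: Vec<RequestMessage>) -> Request {
    Request {
        model: model.id().to_string(),
        messages,
        stream: true,
        max_completion_tokens: model.max_output_tokens(),
        stop: Vec::new(),
        temperature: None,
        tool_choice: None,
        // Only send `parallel_tool_calls` when the model accepts it; models
        // that don't support the parameter reject requests that include it.
        parallel_tool_calls: if model.supports_parallel_tool_calls() {
            Some(false)
        } else {
            None
        },
        tools: Vec::new(),
        prompt_cache_key: if model.supports_prompt_cache_key() {
            Some("example-cache-key".into())
        } else {
            None
        },
        reasoning_effort: model.reasoning_effort(),
    }
}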

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ToolChoice {
    Auto,
    Required,
    None,
    #[serde(untagged)]
    Other(ToolDefinition),
}

#[derive(Clone, Deserialize, Serialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ToolDefinition {
    #[allow(dead_code)]
    Function { function: FunctionDefinition },
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FunctionDefinition {
    pub name: String,
    pub description: Option<String>,
    pub parameters: Option<Value>,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum RequestMessage {
    Assistant {
        content: Option<MessageContent>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        tool_calls: Vec<ToolCall>,
    },
    User {
        content: MessageContent,
    },
    System {
        content: MessageContent,
    },
    Tool {
        content: MessageContent,
        tool_call_id: String,
    },
}

#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(untagged)]
pub enum MessageContent {
    Plain(String),
    Multipart(Vec<MessagePart>),
}

impl MessageContent {
    pub fn empty() -> Self {
        MessageContent::Multipart(vec![])
    }

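    /// Appends a part, promoting plain-text content to multipart as needed and
    /// collapsing a single text part pushed onto empty content back to
    /// [`MessageContent::Plain`].
    ///
    /// A minimal illustration of that collapsing behavior (an added sketch,
    /// not an upstream doc test):
    ///
    /// ```ignore
    /// let mut content = MessageContent::empty();
    /// content.push_part(MessagePart::Text { text: "hello".into() });
    /// assert_eq!(content, MessageContent::Plain("hello".into()));
    /// ```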
    pub fn push_part(&mut self, part: MessagePart) {
        match self {
            MessageContent::Plain(text) => {
                *self =
                    MessageContent::Multipart(vec![MessagePart::Text { text: text.clone() }, part]);
            }
            MessageContent::Multipart(parts) if parts.is_empty() => match part {
                MessagePart::Text { text } => *self = MessageContent::Plain(text),
                MessagePart::Image { .. } => *self = MessageContent::Multipart(vec![part]),
            },
            MessageContent::Multipart(parts) => parts.push(part),
        }
    }
}

impl From<Vec<MessagePart>> for MessageContent {
    fn from(mut parts: Vec<MessagePart>) -> Self {
        if let [MessagePart::Text { text }] = parts.as_mut_slice() {
            MessageContent::Plain(std::mem::take(text))
        } else {
            MessageContent::Multipart(parts)
        }
    }
}

#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(tag = "type")]
pub enum MessagePart {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "image_url")]
    Image { image_url: ImageUrl },
}

#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct ImageUrl {
    pub url: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub detail: Option<String>,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCall {
    pub id: String,
    #[serde(flatten)]
    pub content: ToolCallContent,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolCallContent {
    Function { function: FunctionContent },
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionContent {
    pub name: String,
    pub arguments: String,
}

#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Response {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<Choice>,
    pub usage: Usage,
}

#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Choice {
    pub index: u32,
    pub message: RequestMessage,
    pub finish_reason: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ResponseMessageDelta {
    pub role: Option<Role>,
    pub content: Option<String>,
    #[serde(default, skip_serializing_if = "is_none_or_empty")]
    pub tool_calls: Option<Vec<ToolCallChunk>>,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCallChunk {
    pub index: usize,
    pub id: Option<String>,

    // There is also an optional `type` field that indicates whether a function
    // is present. The `function` sometimes streams in before the `type` does.
    pub function: Option<FunctionChunk>,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionChunk {
    pub name: Option<String>,
    pub arguments: Option<String>,
}
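
// Illustrative sketch (an addition for documentation, not part of the upstream
// API): chunked tool calls arrive keyed by `index`, and the `id`, `name`, and
// `arguments` pieces can be spread across several deltas, so callers typically
// accumulate them per index before building a complete `ToolCall`.
#[allow(dead_code)]
fn accumulate_tool_call_chunks(
    accumulated: &mut std::collections::BTreeMap<usize, (String, String, String)>,
    chunks: Vec<ToolCallChunk>,
) {
    for chunk in chunks {
        // Entries hold (id, function name, function arguments) fragments.
        let entry = accumulated.entry(chunk.index).or_default();
        if let Some(id) = chunk.id {
            entry.0.push_str(&id);
        }
        if let Some(function) = chunk.function {
            if let Some(name) = function.name {
                entry.1.push_str(&name);
            }
            if let Some(arguments) = function.arguments {
                entry.2.push_str(&arguments);
            }
        }
    }
}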

#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Usage {
    pub prompt_tokens: u64,
    pub completion_tokens: u64,
    pub total_tokens: u64,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct ChoiceDelta {
    pub index: u32,
    pub delta: Option<ResponseMessageDelta>,
    pub finish_reason: Option<String>,
}

#[derive(Error, Debug)]
pub enum RequestError {
    #[error("HTTP response error from {provider}'s API: status {status_code} - {body:?}")]
    HttpResponseError {
        provider: String,
        status_code: StatusCode,
        body: String,
        headers: HeaderMap<HeaderValue>,
    },
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}

#[derive(Serialize, Deserialize, Debug)]
pub struct ResponseStreamError {
    message: String,
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum ResponseStreamResult {
    Ok(ResponseStreamEvent),
    Err { error: ResponseStreamError },
}

#[derive(Serialize, Deserialize, Debug)]
pub struct ResponseStreamEvent {
    pub choices: Vec<ChoiceDelta>,
    pub usage: Option<Usage>,
}

pub async fn stream_completion(
    client: &dyn HttpClient,
    provider_name: &str,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<BoxStream<'static, Result<ResponseStreamEvent>>, RequestError> {
    let uri = format!("{api_url}/chat/completions");
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key.trim()));

    let request = request_builder
        .body(AsyncBody::from(
            serde_json::to_string(&request).map_err(|e| RequestError::Other(e.into()))?,
        ))
        .map_err(|e| RequestError::Other(e.into()))?;

    let mut response = client.send(request).await?;
    if response.status().is_success() {
        let reader = BufReader::new(response.into_body());
        Ok(reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        let line = line.strip_prefix("data: ").or_else(|| line.strip_prefix("data:"))?;
                        if line == "[DONE]" {
                            None
                        } else {
                            match serde_json::from_str(line) {
                                Ok(ResponseStreamResult::Ok(response)) => Some(Ok(response)),
                                Ok(ResponseStreamResult::Err { error }) => {
                                    Some(Err(anyhow!(error.message)))
                                }
                                Err(error) => {
                                    log::error!(
                                        "Failed to parse OpenAI response into ResponseStreamResult: `{}`\n\
                                        Response: `{}`",
                                        error,
                                        line,
                                    );
                                    Some(Err(anyhow!(error)))
                                }
                            }
                        }
                    }
                    Err(error) => Some(Err(anyhow!(error))),
                }
            })
            .boxed())
    } else {
        let mut body = String::new();
        response
            .body_mut()
            .read_to_string(&mut body)
            .await
            .map_err(|e| RequestError::Other(e.into()))?;

        Err(RequestError::HttpResponseError {
            provider: provider_name.to_owned(),
            status_code: response.status(),
            body,
            headers: response.headers().clone(),
        })
    }
}
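
// Illustrative usage sketch for `stream_completion` (an addition, not part of
// the upstream API): drains the SSE stream and concatenates the text deltas.
// The provider label and the way the request is built are assumptions for the
// example.
#[allow(dead_code)]
async fn example_stream_text(
    client: &dyn HttpClient,
    api_key: &str,
    request: Request,
) -> Result<String> {
    let mut events = stream_completion(client, "openai", OPEN_AI_API_URL, api_key, request).await?;
    let mut text = String::new();
    while let Some(event) = events.next().await {
        for choice in event?.choices {
            if let Some(content) = choice.delta.and_then(|delta| delta.content) {
                text.push_str(&content);
            }
        }
    }
    Ok(text)
}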

#[derive(Copy, Clone, Serialize, Deserialize)]
pub enum OpenAiEmbeddingModel {
    #[serde(rename = "text-embedding-3-small")]
    TextEmbedding3Small,
    #[serde(rename = "text-embedding-3-large")]
    TextEmbedding3Large,
}

#[derive(Serialize)]
struct OpenAiEmbeddingRequest<'a> {
    model: OpenAiEmbeddingModel,
    input: Vec<&'a str>,
}

#[derive(Deserialize)]
pub struct OpenAiEmbeddingResponse {
    pub data: Vec<OpenAiEmbedding>,
}

#[derive(Deserialize)]
pub struct OpenAiEmbedding {
    pub embedding: Vec<f32>,
}

pub fn embed<'a>(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    model: OpenAiEmbeddingModel,
    texts: impl IntoIterator<Item = &'a str>,
) -> impl 'static + Future<Output = Result<OpenAiEmbeddingResponse>> {
    let uri = format!("{api_url}/embeddings");

    let request = OpenAiEmbeddingRequest {
        model,
        input: texts.into_iter().collect(),
    };
    let body = AsyncBody::from(serde_json::to_string(&request).unwrap());
    let request = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key.trim()))
        .body(body)
        .map(|request| client.send(request));

    async move {
        let mut response = request?.await?;
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;

        anyhow::ensure!(
            response.status().is_success(),
            "error during embedding, status: {:?}, body: {:?}",
            response.status(),
            body
        );
        let response: OpenAiEmbeddingResponse =
            serde_json::from_str(&body).context("failed to parse OpenAI embedding response")?;
        Ok(response)
    }
}
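
// Illustrative usage sketch for `embed` (an addition, not part of the upstream
// API): the input texts and the choice of embedding model are placeholders.
#[allow(dead_code)]
async fn example_embed(client: &dyn HttpClient, api_key: &str) -> Result<Vec<Vec<f32>>> {
    let response = embed(
        client,
        OPEN_AI_API_URL,
        api_key,
        OpenAiEmbeddingModel::TextEmbedding3Small,
        ["first chunk of text", "second chunk of text"],
    )
    .await?;
    // Each input text yields one embedding vector, in the same order.
    Ok(response
        .data
        .into_iter()
        .map(|embedding| embedding.embedding)
        .collect())
}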