// open_ai.rs

  1use anyhow::{Context as _, Result, anyhow};
  2use futures::{
  3    AsyncBufReadExt, AsyncReadExt, StreamExt,
  4    io::BufReader,
  5    stream::{self, BoxStream},
  6};
  7use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
  8use serde::{Deserialize, Serialize};
  9use serde_json::Value;
 10use std::{
 11    convert::TryFrom,
 12    future::{self, Future},
 13};
 14use strum::EnumIter;
 15
 16pub const OPEN_AI_API_URL: &str = "https://api.openai.com/v1";
 17
 18fn is_none_or_empty<T: AsRef<[U]>, U>(opt: &Option<T>) -> bool {
 19    opt.as_ref().map_or(true, |v| v.as_ref().is_empty())
 20}
 21
/// The author of a chat message, serialized in lowercase
/// (`"user"`, `"assistant"`, `"system"`, `"tool"`).
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
    System,
    Tool,
}
 30
 31impl TryFrom<String> for Role {
 32    type Error = anyhow::Error;
 33
 34    fn try_from(value: String) -> Result<Self> {
 35        match value.as_str() {
 36            "user" => Ok(Self::User),
 37            "assistant" => Ok(Self::Assistant),
 38            "system" => Ok(Self::System),
 39            "tool" => Ok(Self::Tool),
 40            _ => Err(anyhow!("invalid role '{value}'")),
 41        }
 42    }
 43}
 44
 45impl From<Role> for String {
 46    fn from(val: Role) -> Self {
 47        match val {
 48            Role::User => "user".to_owned(),
 49            Role::Assistant => "assistant".to_owned(),
 50            Role::System => "system".to_owned(),
 51            Role::Tool => "tool".to_owned(),
 52        }
 53    }
 54}
 55
/// The set of OpenAI models this client knows about, plus a `Custom` escape
/// hatch for arbitrary model ids.
///
/// Each known variant serializes to its official model id string.
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
    #[serde(rename = "gpt-3.5-turbo", alias = "gpt-3.5-turbo")]
    ThreePointFiveTurbo,
    #[serde(rename = "gpt-4", alias = "gpt-4")]
    Four,
    #[serde(rename = "gpt-4-turbo", alias = "gpt-4-turbo")]
    FourTurbo,
    #[serde(rename = "gpt-4o", alias = "gpt-4o")]
    #[default]
    FourOmni,
    #[serde(rename = "gpt-4o-mini", alias = "gpt-4o-mini")]
    FourOmniMini,
    #[serde(rename = "gpt-4.1", alias = "gpt-4.1")]
    FourPointOne,
    #[serde(rename = "gpt-4.1-mini", alias = "gpt-4.1-mini")]
    FourPointOneMini,
    #[serde(rename = "gpt-4.1-nano", alias = "gpt-4.1-nano")]
    FourPointOneNano,
    #[serde(rename = "o1", alias = "o1")]
    O1,
    #[serde(rename = "o1-preview", alias = "o1-preview")]
    O1Preview,
    #[serde(rename = "o1-mini", alias = "o1-mini")]
    O1Mini,
    #[serde(rename = "o3-mini", alias = "o3-mini")]
    O3Mini,
    #[serde(rename = "o3", alias = "o3")]
    O3,
    #[serde(rename = "o4-mini", alias = "o4-mini")]
    O4Mini,

    /// A user-configured model not in the list above.
    #[serde(rename = "custom")]
    Custom {
        /// The model id sent to the API.
        name: String,
        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
        display_name: Option<String>,
        /// Context-window size in tokens.
        max_tokens: usize,
        max_output_tokens: Option<u32>,
        max_completion_tokens: Option<u32>,
    },
}
 99
100impl Model {
101    pub fn default_fast() -> Self {
102        Self::FourPointOneMini
103    }
104
105    pub fn from_id(id: &str) -> Result<Self> {
106        match id {
107            "gpt-3.5-turbo" => Ok(Self::ThreePointFiveTurbo),
108            "gpt-4" => Ok(Self::Four),
109            "gpt-4-turbo-preview" => Ok(Self::FourTurbo),
110            "gpt-4o" => Ok(Self::FourOmni),
111            "gpt-4o-mini" => Ok(Self::FourOmniMini),
112            "gpt-4.1" => Ok(Self::FourPointOne),
113            "gpt-4.1-mini" => Ok(Self::FourPointOneMini),
114            "gpt-4.1-nano" => Ok(Self::FourPointOneNano),
115            "o1" => Ok(Self::O1),
116            "o1-preview" => Ok(Self::O1Preview),
117            "o1-mini" => Ok(Self::O1Mini),
118            "o3-mini" => Ok(Self::O3Mini),
119            "o3" => Ok(Self::O3),
120            "o4-mini" => Ok(Self::O4Mini),
121            _ => Err(anyhow!("invalid model id")),
122        }
123    }
124
125    pub fn id(&self) -> &str {
126        match self {
127            Self::ThreePointFiveTurbo => "gpt-3.5-turbo",
128            Self::Four => "gpt-4",
129            Self::FourTurbo => "gpt-4-turbo",
130            Self::FourOmni => "gpt-4o",
131            Self::FourOmniMini => "gpt-4o-mini",
132            Self::FourPointOne => "gpt-4.1",
133            Self::FourPointOneMini => "gpt-4.1-mini",
134            Self::FourPointOneNano => "gpt-4.1-nano",
135            Self::O1 => "o1",
136            Self::O1Preview => "o1-preview",
137            Self::O1Mini => "o1-mini",
138            Self::O3Mini => "o3-mini",
139            Self::O3 => "o3",
140            Self::O4Mini => "o4-mini",
141            Self::Custom { name, .. } => name,
142        }
143    }
144
145    pub fn display_name(&self) -> &str {
146        match self {
147            Self::ThreePointFiveTurbo => "gpt-3.5-turbo",
148            Self::Four => "gpt-4",
149            Self::FourTurbo => "gpt-4-turbo",
150            Self::FourOmni => "gpt-4o",
151            Self::FourOmniMini => "gpt-4o-mini",
152            Self::FourPointOne => "gpt-4.1",
153            Self::FourPointOneMini => "gpt-4.1-mini",
154            Self::FourPointOneNano => "gpt-4.1-nano",
155            Self::O1 => "o1",
156            Self::O1Preview => "o1-preview",
157            Self::O1Mini => "o1-mini",
158            Self::O3Mini => "o3-mini",
159            Self::O3 => "o3",
160            Self::O4Mini => "o4-mini",
161            Self::Custom {
162                name, display_name, ..
163            } => display_name.as_ref().unwrap_or(name),
164        }
165    }
166
167    pub fn max_token_count(&self) -> usize {
168        match self {
169            Self::ThreePointFiveTurbo => 16_385,
170            Self::Four => 8_192,
171            Self::FourTurbo => 128_000,
172            Self::FourOmni => 128_000,
173            Self::FourOmniMini => 128_000,
174            Self::FourPointOne => 1_047_576,
175            Self::FourPointOneMini => 1_047_576,
176            Self::FourPointOneNano => 1_047_576,
177            Self::O1 => 200_000,
178            Self::O1Preview => 128_000,
179            Self::O1Mini => 128_000,
180            Self::O3Mini => 200_000,
181            Self::O3 => 200_000,
182            Self::O4Mini => 200_000,
183            Self::Custom { max_tokens, .. } => *max_tokens,
184        }
185    }
186
187    pub fn max_output_tokens(&self) -> Option<u32> {
188        match self {
189            Self::Custom {
190                max_output_tokens, ..
191            } => *max_output_tokens,
192            _ => None,
193        }
194    }
195
196    /// Returns whether the given model supports the `parallel_tool_calls` parameter.
197    ///
198    /// If the model does not support the parameter, do not pass it up, or the API will return an error.
199    pub fn supports_parallel_tool_calls(&self) -> bool {
200        match self {
201            Self::ThreePointFiveTurbo
202            | Self::Four
203            | Self::FourTurbo
204            | Self::FourOmni
205            | Self::FourOmniMini
206            | Self::FourPointOne
207            | Self::FourPointOneMini
208            | Self::FourPointOneNano
209            | Self::O1
210            | Self::O1Preview
211            | Self::O1Mini => true,
212            _ => false,
213        }
214    }
215}
216
/// Body of a `/chat/completions` request.
#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
    /// Model id, e.g. "gpt-4o".
    pub model: String,
    pub messages: Vec<RequestMessage>,
    /// Whether the response should be streamed as server-sent events.
    pub stream: bool,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    /// Stop sequences; omitted from the payload when empty.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub stop: Vec<String>,
    pub temperature: f32,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    /// Whether to enable parallel function calling during tool use.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub parallel_tool_calls: Option<bool>,
    /// Tool definitions offered to the model; omitted when empty.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<ToolDefinition>,
}
235
/// Body of a legacy `/completions` (plain-text) request.
#[derive(Debug, Serialize, Deserialize)]
pub struct CompletionRequest {
    pub model: String,
    pub prompt: String,
    pub max_tokens: u32,
    pub temperature: f32,
    /// Optional predicted output used to speed up generation.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub prediction: Option<Prediction>,
    // NOTE(review): non-standard field — presumably a provider-specific
    // extension; confirm against the serving backend.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub rewrite_speculation: Option<bool>,
}
247
/// Predicted-output hint, serialized as `{"type": "content", "content": …}`.
#[derive(Clone, Deserialize, Serialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Prediction {
    Content { content: String },
}
253
254#[derive(Debug, Serialize, Deserialize)]
255#[serde(untagged)]
256pub enum ToolChoice {
257    Auto,
258    Required,
259    None,
260    Other(ToolDefinition),
261}
262
/// A tool the model may call; currently only function tools exist, serialized
/// as `{"type": "function", "function": …}`.
#[derive(Clone, Deserialize, Serialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ToolDefinition {
    #[allow(dead_code)]
    Function { function: FunctionDefinition },
}
269
/// Declaration of a callable function offered to the model.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FunctionDefinition {
    pub name: String,
    pub description: Option<String>,
    /// JSON-schema description of the function's parameters, if any.
    pub parameters: Option<Value>,
}
276
/// A single chat message, internally tagged by its lowercase `role` field.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum RequestMessage {
    /// Model-authored message; may carry tool calls instead of (or alongside)
    /// text content.
    Assistant {
        content: Option<String>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        tool_calls: Vec<ToolCall>,
    },
    /// End-user message.
    User {
        content: String,
    },
    /// System / instruction message.
    System {
        content: String,
    },
    /// Tool result, correlated with the originating call via `tool_call_id`.
    Tool {
        content: String,
        tool_call_id: String,
    },
}
296
/// A tool invocation requested by the model; `content` is flattened so the
/// `type`/`function` fields appear alongside `id` in the JSON.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCall {
    pub id: String,
    #[serde(flatten)]
    pub content: ToolCallContent,
}
303
/// Payload of a tool call, tagged by `type`; only function calls exist today.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolCallContent {
    Function { function: FunctionContent },
}
309
/// A concrete function invocation: the function name plus its arguments as a
/// raw JSON string (not parsed here).
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionContent {
    pub name: String,
    pub arguments: String,
}
315
/// An incremental message fragment from a streamed response; every field may
/// be absent in any given chunk.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ResponseMessageDelta {
    pub role: Option<Role>,
    pub content: Option<String>,
    /// Partial tool calls; skipped on serialize when `None` or empty.
    #[serde(default, skip_serializing_if = "is_none_or_empty")]
    pub tool_calls: Option<Vec<ToolCallChunk>>,
}
323
/// A streamed fragment of a tool call; fragments with the same `index` are
/// accumulated by the consumer into one complete call.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCallChunk {
    pub index: usize,
    /// Present only on the first fragment of a given call.
    pub id: Option<String>,

    // There is also an optional `type` field that would determine if a
    // function is there. Sometimes this streams in with the `function` before
    // it streams in the `type`
    pub function: Option<FunctionChunk>,
}
334
/// A streamed fragment of a function call: `name` and/or a piece of the
/// `arguments` JSON string.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionChunk {
    pub name: Option<String>,
    pub arguments: Option<String>,
}
340
/// Token accounting reported by the API for a completed request.
#[derive(Serialize, Deserialize, Debug)]
pub struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}
347
/// One choice's incremental update within a streamed response event.
#[derive(Serialize, Deserialize, Debug)]
pub struct ChoiceDelta {
    pub index: u32,
    pub delta: ResponseMessageDelta,
    /// Set on the final chunk of this choice (e.g. "stop", "tool_calls").
    pub finish_reason: Option<String>,
}
354
/// Untagged wrapper for a streamed payload: a normal event deserializes as
/// `Ok`, while an `{"error": …}` body deserializes as `Err`.
#[derive(Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum ResponseStreamResult {
    Ok(ResponseStreamEvent),
    Err { error: String },
}
361
/// One server-sent event of a streamed chat completion.
#[derive(Serialize, Deserialize, Debug)]
pub struct ResponseStreamEvent {
    // NOTE(review): presumably a Unix timestamp; u32 overflows in 2106.
    pub created: u32,
    pub model: String,
    pub choices: Vec<ChoiceDelta>,
    /// Usually present only on the final event of the stream.
    pub usage: Option<Usage>,
}
369
/// Response body of the legacy `/completions` endpoint.
#[derive(Serialize, Deserialize, Debug)]
pub struct CompletionResponse {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<CompletionChoice>,
    pub usage: Usage,
}
379
/// A single generated text alternative from `/completions`.
#[derive(Serialize, Deserialize, Debug)]
pub struct CompletionChoice {
    pub text: String,
}
384
/// Response body of a non-streaming `/chat/completions` request.
#[derive(Serialize, Deserialize, Debug)]
pub struct Response {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<Choice>,
    pub usage: Usage,
}
394
/// A single generated alternative in a non-streaming chat response.
#[derive(Serialize, Deserialize, Debug)]
pub struct Choice {
    pub index: u32,
    /// The API reuses the request-message schema for response messages.
    pub message: RequestMessage,
    pub finish_reason: Option<String>,
}
401
402pub async fn complete(
403    client: &dyn HttpClient,
404    api_url: &str,
405    api_key: &str,
406    request: Request,
407) -> Result<Response> {
408    let uri = format!("{api_url}/chat/completions");
409    let request_builder = HttpRequest::builder()
410        .method(Method::POST)
411        .uri(uri)
412        .header("Content-Type", "application/json")
413        .header("Authorization", format!("Bearer {}", api_key));
414
415    let mut request_body = request;
416    request_body.stream = false;
417
418    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request_body)?))?;
419    let mut response = client.send(request).await?;
420
421    if response.status().is_success() {
422        let mut body = String::new();
423        response.body_mut().read_to_string(&mut body).await?;
424        let response: Response = serde_json::from_str(&body)?;
425        Ok(response)
426    } else {
427        let mut body = String::new();
428        response.body_mut().read_to_string(&mut body).await?;
429
430        #[derive(Deserialize)]
431        struct OpenAiResponse {
432            error: OpenAiError,
433        }
434
435        #[derive(Deserialize)]
436        struct OpenAiError {
437            message: String,
438        }
439
440        match serde_json::from_str::<OpenAiResponse>(&body) {
441            Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
442                "Failed to connect to OpenAI API: {}",
443                response.error.message,
444            )),
445
446            _ => Err(anyhow!(
447                "Failed to connect to OpenAI API: {} {}",
448                response.status(),
449                body,
450            )),
451        }
452    }
453}
454
455pub async fn complete_text(
456    client: &dyn HttpClient,
457    api_url: &str,
458    api_key: &str,
459    request: CompletionRequest,
460) -> Result<CompletionResponse> {
461    let uri = format!("{api_url}/completions");
462    let request_builder = HttpRequest::builder()
463        .method(Method::POST)
464        .uri(uri)
465        .header("Content-Type", "application/json")
466        .header("Authorization", format!("Bearer {}", api_key));
467
468    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
469    let mut response = client.send(request).await?;
470
471    if response.status().is_success() {
472        let mut body = String::new();
473        response.body_mut().read_to_string(&mut body).await?;
474        let response = serde_json::from_str(&body)?;
475        Ok(response)
476    } else {
477        let mut body = String::new();
478        response.body_mut().read_to_string(&mut body).await?;
479
480        #[derive(Deserialize)]
481        struct OpenAiResponse {
482            error: OpenAiError,
483        }
484
485        #[derive(Deserialize)]
486        struct OpenAiError {
487            message: String,
488        }
489
490        match serde_json::from_str::<OpenAiResponse>(&body) {
491            Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
492                "Failed to connect to OpenAI API: {}",
493                response.error.message,
494            )),
495
496            _ => Err(anyhow!(
497                "Failed to connect to OpenAI API: {} {}",
498                response.status(),
499                body,
500            )),
501        }
502    }
503}
504
505fn adapt_response_to_stream(response: Response) -> ResponseStreamEvent {
506    ResponseStreamEvent {
507        created: response.created as u32,
508        model: response.model,
509        choices: response
510            .choices
511            .into_iter()
512            .map(|choice| ChoiceDelta {
513                index: choice.index,
514                delta: ResponseMessageDelta {
515                    role: Some(match choice.message {
516                        RequestMessage::Assistant { .. } => Role::Assistant,
517                        RequestMessage::User { .. } => Role::User,
518                        RequestMessage::System { .. } => Role::System,
519                        RequestMessage::Tool { .. } => Role::Tool,
520                    }),
521                    content: match choice.message {
522                        RequestMessage::Assistant { content, .. } => content,
523                        RequestMessage::User { content } => Some(content),
524                        RequestMessage::System { content } => Some(content),
525                        RequestMessage::Tool { content, .. } => Some(content),
526                    },
527                    tool_calls: None,
528                },
529                finish_reason: choice.finish_reason,
530            })
531            .collect(),
532        usage: Some(response.usage),
533    }
534}
535
/// Streams a chat completion from `/chat/completions` as a stream of parsed
/// events.
///
/// Models whose id starts with "o1" are routed through the non-streaming
/// [`complete`] path and adapted into a one-event stream, so callers get a
/// uniform interface either way.
///
/// NOTE(review): assumes the caller set `request.stream = true` for the
/// streaming path — this is not enforced here.
pub async fn stream_completion(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<BoxStream<'static, Result<ResponseStreamEvent>>> {
    if request.model.starts_with("o1") {
        let response = complete(client, api_url, api_key, request).await;
        let response_stream_event = response.map(adapt_response_to_stream);
        return Ok(stream::once(future::ready(response_stream_event)).boxed());
    }

    let uri = format!("{api_url}/chat/completions");
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key));

    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
    let mut response = client.send(request).await?;
    if response.status().is_success() {
        // Server-sent events: each payload line is prefixed with "data: ",
        // and the stream is terminated by a literal "[DONE]" frame.
        let reader = BufReader::new(response.into_body());
        Ok(reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        // Lines without the SSE prefix (e.g. keep-alives) are
                        // dropped by the `?` returning None.
                        let line = line.strip_prefix("data: ")?;
                        if line == "[DONE]" {
                            None
                        } else {
                            match serde_json::from_str(line) {
                                Ok(ResponseStreamResult::Ok(response)) => Some(Ok(response)),
                                Ok(ResponseStreamResult::Err { error }) => {
                                    Some(Err(anyhow!(error)))
                                }
                                Err(error) => Some(Err(anyhow!(error))),
                            }
                        }
                    }
                    Err(error) => Some(Err(anyhow!(error))),
                }
            })
            .boxed())
    } else {
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;

        // Error shape returned by the OpenAI API on failure.
        #[derive(Deserialize)]
        struct OpenAiResponse {
            error: OpenAiError,
        }

        #[derive(Deserialize)]
        struct OpenAiError {
            message: String,
        }

        match serde_json::from_str::<OpenAiResponse>(&body) {
            Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
                "Failed to connect to OpenAI API: {}",
                response.error.message,
            )),

            // Fall back to the raw status and body when the error payload is
            // missing or unparseable.
            _ => Err(anyhow!(
                "Failed to connect to OpenAI API: {} {}",
                response.status(),
                body,
            )),
        }
    }
}
609
/// Embedding models supported by this client, serialized to their official
/// model ids.
#[derive(Copy, Clone, Serialize, Deserialize)]
pub enum OpenAiEmbeddingModel {
    #[serde(rename = "text-embedding-3-small")]
    TextEmbedding3Small,
    #[serde(rename = "text-embedding-3-large")]
    TextEmbedding3Large,
}
617
/// Body of an `/embeddings` request; borrows the input texts to avoid
/// copying them into the payload struct.
#[derive(Serialize)]
struct OpenAiEmbeddingRequest<'a> {
    model: OpenAiEmbeddingModel,
    input: Vec<&'a str>,
}
623
/// Response body of `/embeddings`: one entry per input text, in order.
#[derive(Deserialize)]
pub struct OpenAiEmbeddingResponse {
    pub data: Vec<OpenAiEmbedding>,
}
628
/// A single embedding vector.
#[derive(Deserialize)]
pub struct OpenAiEmbedding {
    pub embedding: Vec<f32>,
}
633
/// Requests embeddings for `texts` from the `/embeddings` endpoint.
///
/// Not an `async fn`: the HTTP request (including the serialized body) is
/// fully built up front, so the returned future is `'static` and does not
/// borrow any of the arguments.
pub fn embed<'a>(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    model: OpenAiEmbeddingModel,
    texts: impl IntoIterator<Item = &'a str>,
) -> impl 'static + Future<Output = Result<OpenAiEmbeddingResponse>> {
    let uri = format!("{api_url}/embeddings");

    let request = OpenAiEmbeddingRequest {
        model,
        input: texts.into_iter().collect(),
    };
    // Serializing a struct of plain strings cannot fail; a failure here
    // would indicate a bug rather than a recoverable condition.
    let body = AsyncBody::from(serde_json::to_string(&request).unwrap());
    // `request` ends up as a Result wrapping the in-flight send future; any
    // builder error is deferred into the async block below via `?`.
    let request = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key))
        .body(body)
        .map(|request| client.send(request));

    async move {
        let mut response = request?.await?;
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;

        if response.status().is_success() {
            let response: OpenAiEmbeddingResponse =
                serde_json::from_str(&body).context("failed to parse OpenAI embedding response")?;
            Ok(response)
        } else {
            Err(anyhow!(
                "error during embedding, status: {:?}, body: {:?}",
                response.status(),
                body
            ))
        }
    }
}