open_ai.rs

use anyhow::{Context as _, Result, anyhow};
use futures::{
    AsyncBufReadExt, AsyncReadExt, StreamExt,
    io::BufReader,
    stream::{self, BoxStream},
};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::{
    convert::TryFrom,
    future::{self, Future},
};
use strum::EnumIter;

pub const OPEN_AI_API_URL: &str = "https://api.openai.com/v1";

fn is_none_or_empty<T: AsRef<[U]>, U>(opt: &Option<T>) -> bool {
    opt.as_ref().map_or(true, |v| v.as_ref().is_empty())
}

#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
    System,
    Tool,
}

impl TryFrom<String> for Role {
    type Error = anyhow::Error;

    fn try_from(value: String) -> Result<Self> {
        match value.as_str() {
            "user" => Ok(Self::User),
            "assistant" => Ok(Self::Assistant),
            "system" => Ok(Self::System),
            "tool" => Ok(Self::Tool),
            _ => anyhow::bail!("invalid role '{value}'"),
        }
    }
}

impl From<Role> for String {
    fn from(val: Role) -> Self {
        match val {
            Role::User => "user".to_owned(),
            Role::Assistant => "assistant".to_owned(),
            Role::System => "system".to_owned(),
            Role::Tool => "tool".to_owned(),
        }
    }
}
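
// Illustrative sketch (not part of the original module): round-tripping a role
// through the `TryFrom<String>`/`From<Role>` conversions above. The helper name
// `role_round_trip_example` is hypothetical.
#[allow(dead_code)]
fn role_round_trip_example() -> Result<()> {
    let role = Role::try_from("assistant".to_string())?;
    assert_eq!(role, Role::Assistant);
    // Converting back yields the lowercase wire format expected by the API.
    assert_eq!(String::from(role), "assistant");
    Ok(())
}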

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
    #[serde(rename = "gpt-3.5-turbo")]
    ThreePointFiveTurbo,
    #[serde(rename = "gpt-4")]
    Four,
    #[serde(rename = "gpt-4-turbo")]
    FourTurbo,
    #[serde(rename = "gpt-4o")]
    #[default]
    FourOmni,
    #[serde(rename = "gpt-4o-mini")]
    FourOmniMini,
    #[serde(rename = "gpt-4.1")]
    FourPointOne,
    #[serde(rename = "gpt-4.1-mini")]
    FourPointOneMini,
    #[serde(rename = "gpt-4.1-nano")]
    FourPointOneNano,
    #[serde(rename = "o1")]
    O1,
    #[serde(rename = "o1-preview")]
    O1Preview,
    #[serde(rename = "o1-mini")]
    O1Mini,
    #[serde(rename = "o3-mini")]
    O3Mini,
    #[serde(rename = "o3")]
    O3,
    #[serde(rename = "o4-mini")]
    O4Mini,

    #[serde(rename = "custom")]
    Custom {
        name: String,
        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
        display_name: Option<String>,
        max_tokens: usize,
        max_output_tokens: Option<u32>,
        max_completion_tokens: Option<u32>,
    },
}

impl Model {
    pub fn default_fast() -> Self {
        Self::FourPointOneMini
    }

    pub fn from_id(id: &str) -> Result<Self> {
        match id {
            "gpt-3.5-turbo" => Ok(Self::ThreePointFiveTurbo),
            "gpt-4" => Ok(Self::Four),
            // Accept both the current id and the legacy preview id, so that
            // `from_id(model.id())` round-trips for `FourTurbo`.
            "gpt-4-turbo" | "gpt-4-turbo-preview" => Ok(Self::FourTurbo),
            "gpt-4o" => Ok(Self::FourOmni),
            "gpt-4o-mini" => Ok(Self::FourOmniMini),
            "gpt-4.1" => Ok(Self::FourPointOne),
            "gpt-4.1-mini" => Ok(Self::FourPointOneMini),
            "gpt-4.1-nano" => Ok(Self::FourPointOneNano),
            "o1" => Ok(Self::O1),
            "o1-preview" => Ok(Self::O1Preview),
            "o1-mini" => Ok(Self::O1Mini),
            "o3-mini" => Ok(Self::O3Mini),
            "o3" => Ok(Self::O3),
            "o4-mini" => Ok(Self::O4Mini),
            invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
        }
    }

    pub fn id(&self) -> &str {
        match self {
            Self::ThreePointFiveTurbo => "gpt-3.5-turbo",
            Self::Four => "gpt-4",
            Self::FourTurbo => "gpt-4-turbo",
            Self::FourOmni => "gpt-4o",
            Self::FourOmniMini => "gpt-4o-mini",
            Self::FourPointOne => "gpt-4.1",
            Self::FourPointOneMini => "gpt-4.1-mini",
            Self::FourPointOneNano => "gpt-4.1-nano",
            Self::O1 => "o1",
            Self::O1Preview => "o1-preview",
            Self::O1Mini => "o1-mini",
            Self::O3Mini => "o3-mini",
            Self::O3 => "o3",
            Self::O4Mini => "o4-mini",
            Self::Custom { name, .. } => name,
        }
    }

    pub fn display_name(&self) -> &str {
        match self {
            Self::ThreePointFiveTurbo => "gpt-3.5-turbo",
            Self::Four => "gpt-4",
            Self::FourTurbo => "gpt-4-turbo",
            Self::FourOmni => "gpt-4o",
            Self::FourOmniMini => "gpt-4o-mini",
            Self::FourPointOne => "gpt-4.1",
            Self::FourPointOneMini => "gpt-4.1-mini",
            Self::FourPointOneNano => "gpt-4.1-nano",
            Self::O1 => "o1",
            Self::O1Preview => "o1-preview",
            Self::O1Mini => "o1-mini",
            Self::O3Mini => "o3-mini",
            Self::O3 => "o3",
            Self::O4Mini => "o4-mini",
            Self::Custom {
                name, display_name, ..
            } => display_name.as_ref().unwrap_or(name),
        }
    }

    pub fn max_token_count(&self) -> usize {
        match self {
            Self::ThreePointFiveTurbo => 16_385,
            Self::Four => 8_192,
            Self::FourTurbo => 128_000,
            Self::FourOmni => 128_000,
            Self::FourOmniMini => 128_000,
            Self::FourPointOne => 1_047_576,
            Self::FourPointOneMini => 1_047_576,
            Self::FourPointOneNano => 1_047_576,
            Self::O1 => 200_000,
            Self::O1Preview => 128_000,
            Self::O1Mini => 128_000,
            Self::O3Mini => 200_000,
            Self::O3 => 200_000,
            Self::O4Mini => 200_000,
            Self::Custom { max_tokens, .. } => *max_tokens,
        }
    }

    pub fn max_output_tokens(&self) -> Option<u32> {
        match self {
            Self::Custom {
                max_output_tokens, ..
            } => *max_output_tokens,
            Self::ThreePointFiveTurbo => Some(4_096),
            Self::Four => Some(8_192),
            Self::FourTurbo => Some(4_096),
            Self::FourOmni => Some(16_384),
            Self::FourOmniMini => Some(16_384),
            Self::FourPointOne => Some(32_768),
            Self::FourPointOneMini => Some(32_768),
            Self::FourPointOneNano => Some(32_768),
            Self::O1 => Some(100_000),
            Self::O1Preview => Some(32_768),
            Self::O1Mini => Some(65_536),
            Self::O3Mini => Some(100_000),
            Self::O3 => Some(100_000),
            Self::O4Mini => Some(100_000),
        }
    }

    /// Returns whether the given model supports the `parallel_tool_calls` parameter.
    ///
    /// If the model does not support the parameter, omit it from the request; otherwise the API returns an error.
    pub fn supports_parallel_tool_calls(&self) -> bool {
        match self {
            Self::ThreePointFiveTurbo
            | Self::Four
            | Self::FourTurbo
            | Self::FourOmni
            | Self::FourOmniMini
            | Self::FourPointOne
            | Self::FourPointOneMini
            | Self::FourPointOneNano => true,
            Self::O1
            | Self::O1Preview
            | Self::O1Mini
            | Self::O3
            | Self::O3Mini
            | Self::O4Mini
            | Self::Custom { .. } => false,
        }
    }
}
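
// Illustrative sketch (not part of the original module): how a caller might
// pick a model and consult its limits before building a request. The helper
// name `model_limits_example` is hypothetical.
#[allow(dead_code)]
fn model_limits_example() {
    let model = Model::default(); // gpt-4o
    assert_eq!(model.id(), "gpt-4o");
    assert_eq!(model.max_token_count(), 128_000);
    assert_eq!(model.max_output_tokens(), Some(16_384));
    // Only pass `parallel_tool_calls` for models that accept the parameter.
    let parallel_tool_calls = if model.supports_parallel_tool_calls() {
        Some(false)
    } else {
        None
    };
    let _ = parallel_tool_calls;
}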

#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
    pub model: String,
    pub messages: Vec<RequestMessage>,
    pub stream: bool,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub stop: Vec<String>,
    pub temperature: f32,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    /// Whether to enable parallel function calling during tool use.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub parallel_tool_calls: Option<bool>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<ToolDefinition>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CompletionRequest {
    pub model: String,
    pub prompt: String,
    pub max_tokens: u32,
    pub temperature: f32,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub prediction: Option<Prediction>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub rewrite_speculation: Option<bool>,
}

#[derive(Clone, Deserialize, Serialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Prediction {
    Content { content: String },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ToolChoice {
    Auto,
    Required,
    None,
    // Serialized as the bare tool definition object, matching the API's
    // `{"type": "function", ...}` form for a forced tool choice.
    #[serde(untagged)]
    Other(ToolDefinition),
}

#[derive(Clone, Deserialize, Serialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ToolDefinition {
    #[allow(dead_code)]
    Function { function: FunctionDefinition },
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FunctionDefinition {
    pub name: String,
    pub description: Option<String>,
    pub parameters: Option<Value>,
}
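
// Illustrative sketch (not part of the original module): declaring a single
// tool and a matching tool choice for a request. The tool name and JSON
// schema are made up for the example.
#[allow(dead_code)]
fn tool_definition_example() -> (ToolDefinition, ToolChoice) {
    let weather_tool = ToolDefinition::Function {
        function: FunctionDefinition {
            name: "get_weather".into(),
            description: Some("Look up the current weather for a city.".into()),
            // `parameters` is an arbitrary JSON Schema value.
            parameters: Some(serde_json::json!({
                "type": "object",
                "properties": { "city": { "type": "string" } },
                "required": ["city"]
            })),
        },
    };
    // Let the model decide whether to call the tool.
    (weather_tool, ToolChoice::Auto)
}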

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum RequestMessage {
    Assistant {
        content: Option<MessageContent>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        tool_calls: Vec<ToolCall>,
    },
    User {
        content: MessageContent,
    },
    System {
        content: MessageContent,
    },
    Tool {
        content: MessageContent,
        tool_call_id: String,
    },
}

#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(untagged)]
pub enum MessageContent {
    Plain(String),
    Multipart(Vec<MessagePart>),
}

impl MessageContent {
    pub fn empty() -> Self {
        MessageContent::Multipart(vec![])
    }

    pub fn push_part(&mut self, part: MessagePart) {
        match self {
            MessageContent::Plain(text) => {
                *self =
                    MessageContent::Multipart(vec![MessagePart::Text { text: text.clone() }, part]);
            }
            MessageContent::Multipart(parts) if parts.is_empty() => match part {
                MessagePart::Text { text } => *self = MessageContent::Plain(text),
                MessagePart::Image { .. } => *self = MessageContent::Multipart(vec![part]),
            },
            MessageContent::Multipart(parts) => parts.push(part),
        }
    }
}

impl From<Vec<MessagePart>> for MessageContent {
    fn from(mut parts: Vec<MessagePart>) -> Self {
        if let [MessagePart::Text { text }] = parts.as_mut_slice() {
            MessageContent::Plain(std::mem::take(text))
        } else {
            MessageContent::Multipart(parts)
        }
    }
}
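
// Illustrative sketch (not part of the original module): `push_part` keeps a
// single text part in the compact `Plain` form and only switches to
// `Multipart` once a second part (or an image) is added.
#[allow(dead_code)]
fn message_content_example() {
    let mut content = MessageContent::empty();
    content.push_part(MessagePart::Text {
        text: "Describe this image.".into(),
    });
    assert_eq!(
        content,
        MessageContent::Plain("Describe this image.".into())
    );

    content.push_part(MessagePart::Image {
        image_url: ImageUrl {
            url: "data:image/png;base64,...".into(),
            detail: None,
        },
    });
    assert!(matches!(content, MessageContent::Multipart(ref parts) if parts.len() == 2));
}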

#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(tag = "type")]
pub enum MessagePart {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "image_url")]
    Image { image_url: ImageUrl },
}

#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
pub struct ImageUrl {
    pub url: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub detail: Option<String>,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCall {
    pub id: String,
    #[serde(flatten)]
    pub content: ToolCallContent,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolCallContent {
    Function { function: FunctionContent },
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionContent {
    pub name: String,
    pub arguments: String,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ResponseMessageDelta {
    pub role: Option<Role>,
    pub content: Option<String>,
    #[serde(default, skip_serializing_if = "is_none_or_empty")]
    pub tool_calls: Option<Vec<ToolCallChunk>>,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCallChunk {
    pub index: usize,
    pub id: Option<String>,

    // The API also streams an optional `type` field that identifies the chunk
    // as a function call, but it sometimes arrives after the `function`
    // payload itself, so we don't rely on it here.
    pub function: Option<FunctionChunk>,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionChunk {
    pub name: Option<String>,
    pub arguments: Option<String>,
}
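
// Illustrative sketch (not part of the original module): accumulating the
// partial tool-call deltas above into complete calls. Chunks for the same call
// share an `index`; `id` and `name` arrive once, while the `arguments` string
// builds up across chunks. The helper name is hypothetical.
#[allow(dead_code)]
fn accumulate_tool_call_chunks(chunks: &[ToolCallChunk]) -> Vec<(String, String, String)> {
    use std::collections::BTreeMap;

    // index -> (id, function name, concatenated arguments JSON)
    let mut calls: BTreeMap<usize, (String, String, String)> = BTreeMap::new();
    for chunk in chunks {
        let entry = calls.entry(chunk.index).or_default();
        if let Some(id) = &chunk.id {
            entry.0 = id.clone();
        }
        if let Some(function) = &chunk.function {
            if let Some(name) = &function.name {
                entry.1 = name.clone();
            }
            if let Some(arguments) = &function.arguments {
                entry.2.push_str(arguments);
            }
        }
    }
    calls.into_values().collect()
}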

#[derive(Serialize, Deserialize, Debug)]
pub struct Usage {
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct ChoiceDelta {
    pub index: u32,
    pub delta: ResponseMessageDelta,
    pub finish_reason: Option<String>,
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum ResponseStreamResult {
    Ok(ResponseStreamEvent),
    Err { error: String },
}

#[derive(Serialize, Deserialize, Debug)]
pub struct ResponseStreamEvent {
    pub created: u32,
    pub model: String,
    pub choices: Vec<ChoiceDelta>,
    pub usage: Option<Usage>,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct CompletionResponse {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<CompletionChoice>,
    pub usage: Usage,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct CompletionChoice {
    pub text: String,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Response {
    pub id: String,
    pub object: String,
    pub created: u64,
    pub model: String,
    pub choices: Vec<Choice>,
    pub usage: Usage,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Choice {
    pub index: u32,
    pub message: RequestMessage,
    pub finish_reason: Option<String>,
}

pub async fn complete(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<Response> {
    let uri = format!("{api_url}/chat/completions");
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key));

    let mut request_body = request;
    request_body.stream = false;

    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request_body)?))?;
    let mut response = client.send(request).await?;

    if response.status().is_success() {
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;
        let response: Response = serde_json::from_str(&body)?;
        Ok(response)
    } else {
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;

        #[derive(Deserialize)]
        struct OpenAiResponse {
            error: OpenAiError,
        }

        #[derive(Deserialize)]
        struct OpenAiError {
            message: String,
        }

        match serde_json::from_str::<OpenAiResponse>(&body) {
            Ok(response) if !response.error.message.is_empty() => anyhow::bail!(
                "Failed to connect to OpenAI API: {}",
                response.error.message,
            ),
            _ => anyhow::bail!(
                "Failed to connect to OpenAI API: {} {}",
                response.status(),
                body,
            ),
        }
    }
}
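
// Illustrative sketch (not part of the original module): building a minimal
// chat request and sending it through `complete`. The caller is assumed to
// supply an `HttpClient` implementation and an API key.
#[allow(dead_code)]
async fn complete_example(client: &dyn HttpClient, api_key: &str) -> Result<String> {
    let request = Request {
        model: Model::default().id().to_string(),
        messages: vec![RequestMessage::User {
            content: MessageContent::Plain("Say hello.".to_string()),
        }],
        stream: false,
        max_tokens: Some(64),
        stop: Vec::new(),
        temperature: 1.0,
        tool_choice: None,
        parallel_tool_calls: None,
        tools: Vec::new(),
    };
    let response = complete(client, OPEN_AI_API_URL, api_key, request).await?;
    // Pull the text out of the first choice, if any.
    match response.choices.into_iter().next() {
        Some(Choice {
            message:
                RequestMessage::Assistant {
                    content: Some(MessageContent::Plain(text)),
                    ..
                },
            ..
        }) => Ok(text),
        _ => Ok(String::new()),
    }
}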

pub async fn complete_text(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: CompletionRequest,
) -> Result<CompletionResponse> {
    let uri = format!("{api_url}/completions");
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key));

    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
    let mut response = client.send(request).await?;

    if response.status().is_success() {
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;
        let response = serde_json::from_str(&body)?;
        Ok(response)
    } else {
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;

        #[derive(Deserialize)]
        struct OpenAiResponse {
            error: OpenAiError,
        }

        #[derive(Deserialize)]
        struct OpenAiError {
            message: String,
        }

        match serde_json::from_str::<OpenAiResponse>(&body) {
            Ok(response) if !response.error.message.is_empty() => anyhow::bail!(
                "Failed to connect to OpenAI API: {}",
                response.error.message,
            ),
            _ => anyhow::bail!(
                "Failed to connect to OpenAI API: {} {}",
                response.status(),
                body,
            ),
        }
    }
}

fn adapt_response_to_stream(response: Response) -> ResponseStreamEvent {
    ResponseStreamEvent {
        created: response.created as u32,
        model: response.model,
        choices: response
            .choices
            .into_iter()
            .map(|choice| {
                let content = match &choice.message {
                    RequestMessage::Assistant { content, .. } => content.as_ref(),
                    RequestMessage::User { content } => Some(content),
                    RequestMessage::System { content } => Some(content),
                    RequestMessage::Tool { content, .. } => Some(content),
                };

                let mut text_content = String::new();
                match content {
                    Some(MessageContent::Plain(text)) => text_content.push_str(text),
                    Some(MessageContent::Multipart(parts)) => {
                        for part in parts {
                            match part {
                                MessagePart::Text { text } => text_content.push_str(text),
                                MessagePart::Image { .. } => {}
                            }
                        }
                    }
                    None => {}
                };

                ChoiceDelta {
                    index: choice.index,
                    delta: ResponseMessageDelta {
                        role: Some(match choice.message {
                            RequestMessage::Assistant { .. } => Role::Assistant,
                            RequestMessage::User { .. } => Role::User,
                            RequestMessage::System { .. } => Role::System,
                            RequestMessage::Tool { .. } => Role::Tool,
                        }),
                        content: if text_content.is_empty() {
                            None
                        } else {
                            Some(text_content)
                        },
                        tool_calls: None,
                    },
                    finish_reason: choice.finish_reason,
                }
            })
            .collect(),
        usage: Some(response.usage),
    }
}

pub async fn stream_completion(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<BoxStream<'static, Result<ResponseStreamEvent>>> {
    if request.model.starts_with("o1") {
        let response = complete(client, api_url, api_key, request).await;
        let response_stream_event = response.map(adapt_response_to_stream);
        return Ok(stream::once(future::ready(response_stream_event)).boxed());
    }

    let uri = format!("{api_url}/chat/completions");
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key));

    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
    let mut response = client.send(request).await?;
    if response.status().is_success() {
        let reader = BufReader::new(response.into_body());
        Ok(reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        let line = line.strip_prefix("data: ")?;
                        if line == "[DONE]" {
                            None
                        } else {
                            match serde_json::from_str(line) {
                                Ok(ResponseStreamResult::Ok(response)) => Some(Ok(response)),
                                Ok(ResponseStreamResult::Err { error }) => {
                                    Some(Err(anyhow!(error)))
                                }
                                Err(error) => Some(Err(anyhow!(error))),
                            }
                        }
                    }
                    Err(error) => Some(Err(anyhow!(error))),
                }
            })
            .boxed())
    } else {
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;

        #[derive(Deserialize)]
        struct OpenAiResponse {
            error: OpenAiError,
        }

        #[derive(Deserialize)]
        struct OpenAiError {
            message: String,
        }

        match serde_json::from_str::<OpenAiResponse>(&body) {
            Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
                "Failed to connect to OpenAI API: {}",
                response.error.message,
            )),

            _ => anyhow::bail!(
                "Failed to connect to OpenAI API: {} {}",
                response.status(),
                body,
            ),
        }
    }
}
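
// Illustrative sketch (not part of the original module): draining the SSE
// stream returned by `stream_completion` and concatenating the text deltas.
// The caller is assumed to supply an `HttpClient` implementation and a
// fully-built `Request` with `stream: true`.
#[allow(dead_code)]
async fn stream_completion_example(
    client: &dyn HttpClient,
    api_key: &str,
    request: Request,
) -> Result<String> {
    let mut events = stream_completion(client, OPEN_AI_API_URL, api_key, request).await?;
    let mut output = String::new();
    while let Some(event) = events.next().await {
        for choice in event?.choices {
            if let Some(content) = choice.delta.content {
                output.push_str(&content);
            }
        }
    }
    Ok(output)
}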

#[derive(Copy, Clone, Serialize, Deserialize)]
pub enum OpenAiEmbeddingModel {
    #[serde(rename = "text-embedding-3-small")]
    TextEmbedding3Small,
    #[serde(rename = "text-embedding-3-large")]
    TextEmbedding3Large,
}

#[derive(Serialize)]
struct OpenAiEmbeddingRequest<'a> {
    model: OpenAiEmbeddingModel,
    input: Vec<&'a str>,
}

#[derive(Deserialize)]
pub struct OpenAiEmbeddingResponse {
    pub data: Vec<OpenAiEmbedding>,
}

#[derive(Deserialize)]
pub struct OpenAiEmbedding {
    pub embedding: Vec<f32>,
}

pub fn embed<'a>(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    model: OpenAiEmbeddingModel,
    texts: impl IntoIterator<Item = &'a str>,
) -> impl 'static + Future<Output = Result<OpenAiEmbeddingResponse>> {
    let uri = format!("{api_url}/embeddings");

    let request = OpenAiEmbeddingRequest {
        model,
        input: texts.into_iter().collect(),
    };
    let body = AsyncBody::from(serde_json::to_string(&request).unwrap());
    let request = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", api_key))
        .body(body)
        .map(|request| client.send(request));

    async move {
        let mut response = request?.await?;
        let mut body = String::new();
        response.body_mut().read_to_string(&mut body).await?;

        anyhow::ensure!(
            response.status().is_success(),
            "error during embedding, status: {:?}, body: {:?}",
            response.status(),
            body
        );
        let response: OpenAiEmbeddingResponse =
            serde_json::from_str(&body).context("failed to parse OpenAI embedding response")?;
        Ok(response)
    }
}
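
// Illustrative sketch (not part of the original module): embedding a couple of
// strings and reading back the vectors. The caller is assumed to supply an
// `HttpClient` implementation and an API key.
#[allow(dead_code)]
async fn embed_example(client: &dyn HttpClient, api_key: &str) -> Result<Vec<Vec<f32>>> {
    let response = embed(
        client,
        OPEN_AI_API_URL,
        api_key,
        OpenAiEmbeddingModel::TextEmbedding3Small,
        ["first chunk of text", "second chunk of text"],
    )
    .await?;
    // One embedding vector is returned per input string, in order.
    Ok(response.data.into_iter().map(|d| d.embedding).collect())
}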