anthropic.rs

use std::str::FromStr;
use std::time::Duration;

use anyhow::{Context as _, Result, anyhow};
use chrono::{DateTime, Utc};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use http_client::http::{HeaderMap, HeaderValue};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use strum::{EnumIter, EnumString};
use thiserror::Error;

pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com";

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub struct AnthropicModelCacheConfiguration {
    pub min_total_token: usize,
    pub should_speculate: bool,
    pub max_cache_anchors: usize,
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum AnthropicModelMode {
    #[default]
    Default,
    Thinking {
        budget_tokens: Option<u32>,
    },
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
    #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
    Claude3_5Sonnet,
    #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
    Claude3_7Sonnet,
    #[serde(
        rename = "claude-3-7-sonnet-thinking",
        alias = "claude-3-7-sonnet-thinking-latest"
    )]
    Claude3_7SonnetThinking,
    #[serde(rename = "claude-opus-4", alias = "claude-opus-4-latest")]
    ClaudeOpus4,
    #[serde(
        rename = "claude-opus-4-thinking",
        alias = "claude-opus-4-thinking-latest"
    )]
    ClaudeOpus4Thinking,
    #[default]
    #[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
    ClaudeSonnet4,
    #[serde(
        rename = "claude-sonnet-4-thinking",
        alias = "claude-sonnet-4-thinking-latest"
    )]
    ClaudeSonnet4Thinking,
    #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
    Claude3_5Haiku,
    #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
    Claude3Opus,
    #[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")]
    Claude3Sonnet,
    #[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-latest")]
    Claude3Haiku,
    #[serde(rename = "custom")]
    Custom {
        name: String,
        max_tokens: usize,
        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
        display_name: Option<String>,
        /// Override this model with a different Anthropic model for tool calls.
        tool_override: Option<String>,
        /// Indicates whether this custom model supports caching.
        cache_configuration: Option<AnthropicModelCacheConfiguration>,
        max_output_tokens: Option<u32>,
        default_temperature: Option<f32>,
        #[serde(default)]
        extra_beta_headers: Vec<String>,
        #[serde(default)]
        mode: AnthropicModelMode,
    },
}

impl Model {
    pub fn default_fast() -> Self {
        Self::Claude3_5Haiku
    }

    pub fn from_id(id: &str) -> Result<Self> {
        if id.starts_with("claude-3-5-sonnet") {
            Ok(Self::Claude3_5Sonnet)
        } else if id.starts_with("claude-3-7-sonnet-thinking") {
            Ok(Self::Claude3_7SonnetThinking)
        } else if id.starts_with("claude-3-7-sonnet") {
            Ok(Self::Claude3_7Sonnet)
        } else if id.starts_with("claude-3-5-haiku") {
            Ok(Self::Claude3_5Haiku)
        } else if id.starts_with("claude-3-opus") {
            Ok(Self::Claude3Opus)
        } else if id.starts_with("claude-3-sonnet") {
            Ok(Self::Claude3Sonnet)
        } else if id.starts_with("claude-3-haiku") {
            Ok(Self::Claude3Haiku)
        } else if id.starts_with("claude-opus-4-thinking") {
            Ok(Self::ClaudeOpus4Thinking)
        } else if id.starts_with("claude-opus-4") {
            Ok(Self::ClaudeOpus4)
        } else if id.starts_with("claude-sonnet-4-thinking") {
            Ok(Self::ClaudeSonnet4Thinking)
        } else if id.starts_with("claude-sonnet-4") {
            Ok(Self::ClaudeSonnet4)
        } else {
            anyhow::bail!("invalid model id {id}");
        }
    }

    pub fn id(&self) -> &str {
        match self {
            Model::ClaudeOpus4 => "claude-opus-4-latest",
            Model::ClaudeOpus4Thinking => "claude-opus-4-thinking-latest",
            Model::ClaudeSonnet4 => "claude-sonnet-4-latest",
            Model::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
            Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
            Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
            Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
            Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
            Model::Claude3Opus => "claude-3-opus-latest",
            Model::Claude3Sonnet => "claude-3-sonnet-20240229",
            Model::Claude3Haiku => "claude-3-haiku-20240307",
            Self::Custom { name, .. } => name,
        }
    }

    /// The id of the model that should be used for making API requests
    pub fn request_id(&self) -> &str {
        match self {
            Model::ClaudeOpus4 | Model::ClaudeOpus4Thinking => "claude-opus-4-20250514",
            Model::ClaudeSonnet4 | Model::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
            Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
            Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
            Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
            Model::Claude3Opus => "claude-3-opus-latest",
            Model::Claude3Sonnet => "claude-3-sonnet-20240229",
            Model::Claude3Haiku => "claude-3-haiku-20240307",
            Self::Custom { name, .. } => name,
        }
    }

    pub fn display_name(&self) -> &str {
        match self {
            Model::ClaudeOpus4 => "Claude Opus 4",
            Model::ClaudeOpus4Thinking => "Claude Opus 4 Thinking",
            Model::ClaudeSonnet4 => "Claude Sonnet 4",
            Model::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
            Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
            Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
            Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
            Self::Claude3_5Haiku => "Claude 3.5 Haiku",
            Self::Claude3Opus => "Claude 3 Opus",
            Self::Claude3Sonnet => "Claude 3 Sonnet",
            Self::Claude3Haiku => "Claude 3 Haiku",
            Self::Custom {
                name, display_name, ..
            } => display_name.as_ref().unwrap_or(name),
        }
    }

    pub fn cache_configuration(&self) -> Option<AnthropicModelCacheConfiguration> {
        match self {
            Self::ClaudeOpus4
            | Self::ClaudeOpus4Thinking
            | Self::ClaudeSonnet4
            | Self::ClaudeSonnet4Thinking
            | Self::Claude3_5Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration {
                min_total_token: 2_048,
                should_speculate: true,
                max_cache_anchors: 4,
            }),
            Self::Custom {
                cache_configuration,
                ..
            } => cache_configuration.clone(),
            _ => None,
        }
    }

    pub fn max_token_count(&self) -> usize {
        match self {
            Self::ClaudeOpus4
            | Self::ClaudeOpus4Thinking
            | Self::ClaudeSonnet4
            | Self::ClaudeSonnet4Thinking
            | Self::Claude3_5Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => 200_000,
            Self::Custom { max_tokens, .. } => *max_tokens,
        }
    }

    pub fn max_output_tokens(&self) -> u32 {
        match self {
            Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096,
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3_5Haiku
            | Self::ClaudeOpus4
            | Self::ClaudeOpus4Thinking
            | Self::ClaudeSonnet4
            | Self::ClaudeSonnet4Thinking => 8_192,
            Self::Custom {
                max_output_tokens, ..
            } => max_output_tokens.unwrap_or(4_096),
        }
    }

    pub fn default_temperature(&self) -> f32 {
        match self {
            Self::ClaudeOpus4
            | Self::ClaudeOpus4Thinking
            | Self::ClaudeSonnet4
            | Self::ClaudeSonnet4Thinking
            | Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3_5Haiku
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => 1.0,
            Self::Custom {
                default_temperature,
                ..
            } => default_temperature.unwrap_or(1.0),
        }
    }

    pub fn mode(&self) -> AnthropicModelMode {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_5Haiku
            | Self::ClaudeOpus4
            | Self::ClaudeSonnet4
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => AnthropicModelMode::Default,
            Self::Claude3_7SonnetThinking
            | Self::ClaudeOpus4Thinking
            | Self::ClaudeSonnet4Thinking => AnthropicModelMode::Thinking {
                budget_tokens: Some(4_096),
            },
            Self::Custom { mode, .. } => mode.clone(),
        }
    }

    pub const DEFAULT_BETA_HEADERS: &[&str] = &["prompt-caching-2024-07-31"];

    pub fn beta_headers(&self) -> String {
        let mut headers = Self::DEFAULT_BETA_HEADERS
            .into_iter()
            .map(|header| header.to_string())
            .collect::<Vec<_>>();

        match self {
            Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => {
                // Try beta token-efficient tool use (supported in Claude 3.7 Sonnet only)
                // https://docs.anthropic.com/en/docs/build-with-claude/tool-use/token-efficient-tool-use
                headers.push("token-efficient-tools-2025-02-19".to_string());
            }
            Self::Custom {
                extra_beta_headers, ..
            } => {
                headers.extend(
                    extra_beta_headers
                        .iter()
                        .filter(|header| !header.trim().is_empty())
                        .cloned(),
                );
            }
            _ => {}
        }

        headers.join(",")
    }

    pub fn tool_model_id(&self) -> &str {
        if let Self::Custom {
            tool_override: Some(tool_override),
            ..
        } = self
        {
            tool_override
        } else {
            self.request_id()
        }
    }
}
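
// A small sketch of how the id helpers above are expected to behave: `from_id` matches the
// longer "-thinking" prefixes before their base ids, and `beta_headers` always contains the
// default prompt-caching beta. The "gpt-4o" string is just an arbitrary non-Claude id used to
// exercise the error path; everything else is defined in this file.
#[test]
fn test_model_id_helpers() {
    assert_eq!(
        Model::from_id("claude-opus-4-thinking-latest").unwrap(),
        Model::ClaudeOpus4Thinking
    );
    assert_eq!(
        Model::from_id("claude-opus-4-latest").unwrap(),
        Model::ClaudeOpus4
    );
    assert!(Model::from_id("gpt-4o").is_err());

    assert!(
        Model::Claude3_7Sonnet
            .beta_headers()
            .contains("prompt-caching-2024-07-31")
    );
}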

pub async fn complete(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<Response, AnthropicError> {
    let uri = format!("{api_url}/v1/messages");
    let beta_headers = Model::from_id(&request.model)
        .map(|model| model.beta_headers())
        .unwrap_or_else(|_err| Model::DEFAULT_BETA_HEADERS.join(","));
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Anthropic-Version", "2023-06-01")
        .header("Anthropic-Beta", beta_headers)
        .header("X-Api-Key", api_key)
        .header("Content-Type", "application/json");

    let serialized_request =
        serde_json::to_string(&request).context("failed to serialize request")?;
    let request = request_builder
        .body(AsyncBody::from(serialized_request))
        .context("failed to construct request body")?;

    let mut response = client
        .send(request)
        .await
        .context("failed to send request to Anthropic")?;
    if response.status().is_success() {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;
        let response_message: Response =
            serde_json::from_slice(&body).context("failed to deserialize response body")?;
        Ok(response_message)
    } else {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;
        let body_str =
            std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;
        Err(AnthropicError::Other(anyhow!(
            "Failed to connect to API: {} {}",
            response.status(),
            body_str
        )))
    }
}
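
// Illustrative sketch (not part of this module's public API): one way a caller might assemble a
// `Request` for `complete`, using the default model's helpers for the request id, output-token
// budget, and temperature. The caller supplies its own `HttpClient` and API key; the prompt text
// is a placeholder.
#[allow(dead_code)]
async fn example_complete(
    client: &dyn HttpClient,
    api_key: &str,
) -> Result<Response, AnthropicError> {
    let model = Model::default();
    let request = Request {
        model: model.request_id().to_string(),
        max_tokens: model.max_output_tokens(),
        messages: vec![Message {
            role: Role::User,
            content: vec![RequestContent::Text {
                text: "Hello, Claude".to_string(),
                cache_control: None,
            }],
        }],
        tools: Vec::new(),
        thinking: None,
        tool_choice: None,
        system: None,
        metadata: None,
        stop_sequences: Vec::new(),
        temperature: Some(model.default_temperature()),
        top_k: None,
        top_p: None,
    };
    complete(client, ANTHROPIC_API_URL, api_key, request).await
}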

pub async fn stream_completion(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<BoxStream<'static, Result<Event, AnthropicError>>, AnthropicError> {
    stream_completion_with_rate_limit_info(client, api_url, api_key, request)
        .await
        .map(|output| output.0)
}

/// An individual rate limit.
#[derive(Debug)]
pub struct RateLimit {
    pub limit: usize,
    pub remaining: usize,
    pub reset: DateTime<Utc>,
}

impl RateLimit {
    fn from_headers(resource: &str, headers: &HeaderMap<HeaderValue>) -> Result<Self> {
        let limit =
            get_header(&format!("anthropic-ratelimit-{resource}-limit"), headers)?.parse()?;
        let remaining = get_header(
            &format!("anthropic-ratelimit-{resource}-remaining"),
            headers,
        )?
        .parse()?;
        let reset = DateTime::parse_from_rfc3339(get_header(
            &format!("anthropic-ratelimit-{resource}-reset"),
            headers,
        )?)?
        .to_utc();

        Ok(Self {
            limit,
            remaining,
            reset,
        })
    }
}

/// <https://docs.anthropic.com/en/api/rate-limits#response-headers>
#[derive(Debug)]
pub struct RateLimitInfo {
    pub retry_after: Option<Duration>,
    pub requests: Option<RateLimit>,
    pub tokens: Option<RateLimit>,
    pub input_tokens: Option<RateLimit>,
    pub output_tokens: Option<RateLimit>,
}

impl RateLimitInfo {
    fn from_headers(headers: &HeaderMap<HeaderValue>) -> Self {
        // Check if any rate limit headers exist
        let has_rate_limit_headers = headers
            .keys()
            .any(|k| k == "retry-after" || k.as_str().starts_with("anthropic-ratelimit-"));

        if !has_rate_limit_headers {
            return Self {
                retry_after: None,
                requests: None,
                tokens: None,
                input_tokens: None,
                output_tokens: None,
            };
        }

        Self {
            retry_after: headers
                .get("retry-after")
                .and_then(|v| v.to_str().ok())
                .and_then(|v| v.parse::<u64>().ok())
                .map(Duration::from_secs),
            requests: RateLimit::from_headers("requests", headers).ok(),
            tokens: RateLimit::from_headers("tokens", headers).ok(),
            input_tokens: RateLimit::from_headers("input-tokens", headers).ok(),
            output_tokens: RateLimit::from_headers("output-tokens", headers).ok(),
        }
    }
}

fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> anyhow::Result<&'a str> {
    Ok(headers
        .get(key)
        .with_context(|| format!("missing header `{key}`"))?
        .to_str()?)
}
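
// Sketch of the header-driven construction above: with only a `retry-after` header present,
// `RateLimitInfo` records the retry delay and leaves every per-resource limit unset. The header
// value "30" is an arbitrary example.
#[test]
fn test_rate_limit_info_from_retry_after_header() {
    let mut headers = HeaderMap::new();
    headers.insert("retry-after", HeaderValue::from_static("30"));

    let info = RateLimitInfo::from_headers(&headers);
    assert_eq!(info.retry_after, Some(Duration::from_secs(30)));
    assert!(info.requests.is_none());
    assert!(info.tokens.is_none());
}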

pub async fn stream_completion_with_rate_limit_info(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<
    (
        BoxStream<'static, Result<Event, AnthropicError>>,
        Option<RateLimitInfo>,
    ),
    AnthropicError,
> {
    let request = StreamingRequest {
        base: request,
        stream: true,
    };
    let uri = format!("{api_url}/v1/messages");
    let beta_headers = Model::from_id(&request.base.model)
        .map(|model| model.beta_headers())
        .unwrap_or_else(|_err| Model::DEFAULT_BETA_HEADERS.join(","));
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Anthropic-Version", "2023-06-01")
        .header("Anthropic-Beta", beta_headers)
        .header("X-Api-Key", api_key)
        .header("Content-Type", "application/json");
    let serialized_request =
        serde_json::to_string(&request).context("failed to serialize request")?;
    let request = request_builder
        .body(AsyncBody::from(serialized_request))
        .context("failed to construct request body")?;

    let mut response = client
        .send(request)
        .await
        .context("failed to send request to Anthropic")?;
    let rate_limits = RateLimitInfo::from_headers(response.headers());
    if response.status().is_success() {
        let reader = BufReader::new(response.into_body());
        let stream = reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        let line = line.strip_prefix("data: ")?;
                        match serde_json::from_str(line) {
                            Ok(response) => Some(Ok(response)),
                            Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
                        }
                    }
                    Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
                }
            })
            .boxed();
        Ok((stream, Some(rate_limits)))
    } else if let Some(retry_after) = rate_limits.retry_after {
        Err(AnthropicError::RateLimit(retry_after))
    } else {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;

        let body_str =
            std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;

        match serde_json::from_str::<Event>(body_str) {
            Ok(Event::Error { error }) => Err(AnthropicError::ApiError(error)),
            Ok(_) => Err(AnthropicError::Other(anyhow!(
                "Unexpected success response while expecting an error: '{body_str}'",
            ))),
            Err(_) => Err(AnthropicError::Other(anyhow!(
                "Failed to connect to API: {} {}",
                response.status(),
                body_str,
            ))),
        }
    }
}
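
// Illustrative sketch (not used by this module): how a caller might fold the event stream
// returned by `stream_completion` into plain text. Only `TextDelta` chunks are collected;
// thinking, signature, and tool-use deltas are ignored here for brevity.
#[allow(dead_code)]
async fn collect_text(
    mut events: BoxStream<'static, Result<Event, AnthropicError>>,
) -> Result<String, AnthropicError> {
    let mut text = String::new();
    while let Some(event) = events.next().await {
        if let Event::ContentBlockDelta {
            delta: ContentDelta::TextDelta { text: chunk },
            ..
        } = event?
        {
            text.push_str(&chunk);
        }
    }
    Ok(text)
}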

#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
#[serde(rename_all = "lowercase")]
pub enum CacheControlType {
    Ephemeral,
}

#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
pub struct CacheControl {
    #[serde(rename = "type")]
    pub cache_type: CacheControlType,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
    pub role: Role,
    pub content: Vec<RequestContent>,
}

#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum RequestContent {
    #[serde(rename = "text")]
    Text {
        text: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "thinking")]
    Thinking {
        thinking: String,
        signature: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "redacted_thinking")]
    RedactedThinking { data: String },
    #[serde(rename = "image")]
    Image {
        source: ImageSource,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "tool_result")]
    ToolResult {
        tool_use_id: String,
        is_error: bool,
        content: ToolResultContent,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ToolResultContent {
    Plain(String),
    Multipart(Vec<ToolResultPart>),
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolResultPart {
    Text { text: String },
    Image { source: ImageSource },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ResponseContent {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "thinking")]
    Thinking { thinking: String },
    #[serde(rename = "redacted_thinking")]
    RedactedThinking { data: String },
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ImageSource {
    #[serde(rename = "type")]
    pub source_type: String,
    pub media_type: String,
    pub data: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Tool {
    pub name: String,
    pub description: String,
    pub input_schema: serde_json::Value,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolChoice {
    Auto,
    Any,
    Tool { name: String },
    None,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum Thinking {
    Enabled { budget_tokens: Option<u32> },
}
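
// A minimal sketch (an assumed convention, not an API defined elsewhere in this file) of how a
// caller might translate a model's `AnthropicModelMode` into the request-level `thinking` field.
#[allow(dead_code)]
fn thinking_for_model(model: &Model) -> Option<Thinking> {
    match model.mode() {
        AnthropicModelMode::Default => None,
        AnthropicModelMode::Thinking { budget_tokens } => Some(Thinking::Enabled { budget_tokens }),
    }
}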

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum StringOrContents {
    String(String),
    Content(Vec<RequestContent>),
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
    pub model: String,
    pub max_tokens: u32,
    pub messages: Vec<Message>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<Tool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking: Option<Thinking>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub system: Option<StringOrContents>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Metadata>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub stop_sequences: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_k: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
}

#[derive(Debug, Serialize, Deserialize)]
struct StreamingRequest {
    #[serde(flatten)]
    pub base: Request,
    pub stream: bool,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Metadata {
    pub user_id: Option<String>,
}

#[derive(Debug, Serialize, Deserialize, Default)]
pub struct Usage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub input_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub output_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_creation_input_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_read_input_tokens: Option<u32>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Response {
    pub id: String,
    #[serde(rename = "type")]
    pub response_type: String,
    pub role: Role,
    pub content: Vec<ResponseContent>,
    pub model: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stop_reason: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stop_sequence: Option<String>,
    pub usage: Usage,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum Event {
    #[serde(rename = "message_start")]
    MessageStart { message: Response },
    #[serde(rename = "content_block_start")]
    ContentBlockStart {
        index: usize,
        content_block: ResponseContent,
    },
    #[serde(rename = "content_block_delta")]
    ContentBlockDelta { index: usize, delta: ContentDelta },
    #[serde(rename = "content_block_stop")]
    ContentBlockStop { index: usize },
    #[serde(rename = "message_delta")]
    MessageDelta { delta: MessageDelta, usage: Usage },
    #[serde(rename = "message_stop")]
    MessageStop,
    #[serde(rename = "ping")]
    Ping,
    #[serde(rename = "error")]
    Error { error: ApiError },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ContentDelta {
    #[serde(rename = "text_delta")]
    TextDelta { text: String },
    #[serde(rename = "thinking_delta")]
    ThinkingDelta { thinking: String },
    #[serde(rename = "signature_delta")]
    SignatureDelta { signature: String },
    #[serde(rename = "input_json_delta")]
    InputJsonDelta { partial_json: String },
}

#[derive(Debug, Serialize, Deserialize)]
pub struct MessageDelta {
    pub stop_reason: Option<String>,
    pub stop_sequence: Option<String>,
}

#[derive(Error, Debug)]
pub enum AnthropicError {
    #[error("rate limit exceeded, retry after {0:?}")]
    RateLimit(Duration),
    #[error("an error occurred while interacting with the Anthropic API: {error_type}: {message}", error_type = .0.error_type, message = .0.message)]
    ApiError(ApiError),
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ApiError {
    #[serde(rename = "type")]
    pub error_type: String,
    pub message: String,
}

/// An Anthropic API error code.
/// <https://docs.anthropic.com/en/api/errors#http-errors>
#[derive(Debug, PartialEq, Eq, Clone, Copy, EnumString)]
#[strum(serialize_all = "snake_case")]
pub enum ApiErrorCode {
    /// 400 - `invalid_request_error`: There was an issue with the format or content of your request.
    InvalidRequestError,
    /// 401 - `authentication_error`: There's an issue with your API key.
    AuthenticationError,
    /// 403 - `permission_error`: Your API key does not have permission to use the specified resource.
    PermissionError,
    /// 404 - `not_found_error`: The requested resource was not found.
    NotFoundError,
    /// 413 - `request_too_large`: Request exceeds the maximum allowed number of bytes.
    RequestTooLarge,
    /// 429 - `rate_limit_error`: Your account has hit a rate limit.
    RateLimitError,
    /// 500 - `api_error`: An unexpected error has occurred internal to Anthropic's systems.
    ApiError,
    /// 529 - `overloaded_error`: Anthropic's API is temporarily overloaded.
    OverloadedError,
}

impl ApiError {
    pub fn code(&self) -> Option<ApiErrorCode> {
        ApiErrorCode::from_str(&self.error_type).ok()
    }

    pub fn is_rate_limit_error(&self) -> bool {
        matches!(self.error_type.as_str(), "rate_limit_error")
    }

    pub fn match_window_exceeded(&self) -> Option<usize> {
        let Some(ApiErrorCode::InvalidRequestError) = self.code() else {
            return None;
        };

        parse_prompt_too_long(&self.message)
    }
}

pub fn parse_prompt_too_long(message: &str) -> Option<usize> {
    message
        .strip_prefix("prompt is too long: ")?
        .split_once(" tokens")?
        .0
        .parse::<usize>()
        .ok()
}

#[test]
fn test_match_window_exceeded() {
    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: 220000 tokens > 200000".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), Some(220_000));

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: 1234953 tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), Some(1234953));

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "not a prompt length error".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);

    let error = ApiError {
        error_type: "rate_limit_error".to_string(),
        message: "prompt is too long: 12345 tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: invalid tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);
}