mod supported_countries;

use std::str::FromStr;

use anyhow::{Context as _, Result, anyhow};
use chrono::{DateTime, Utc};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use http_client::http::{HeaderMap, HeaderValue};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use strum::{EnumIter, EnumString};
use thiserror::Error;

pub use supported_countries::*;

pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com";

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub struct AnthropicModelCacheConfiguration {
    pub min_total_token: usize,
    pub should_speculate: bool,
    pub max_cache_anchors: usize,
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum AnthropicModelMode {
    #[default]
    Default,
    Thinking {
        budget_tokens: Option<u32>,
    },
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
    #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
    Claude3_5Sonnet,
    #[default]
    #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
    Claude3_7Sonnet,
    #[serde(
        rename = "claude-3-7-sonnet-thinking",
        alias = "claude-3-7-sonnet-thinking-latest"
    )]
    Claude3_7SonnetThinking,
    #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
    Claude3_5Haiku,
    #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
    Claude3Opus,
    #[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")]
    Claude3Sonnet,
    #[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-latest")]
    Claude3Haiku,
    #[serde(rename = "custom")]
    Custom {
        name: String,
        max_tokens: usize,
        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
        display_name: Option<String>,
        /// Override this model with a different Anthropic model for tool calls.
        tool_override: Option<String>,
        /// Indicates whether this custom model supports caching.
        cache_configuration: Option<AnthropicModelCacheConfiguration>,
        max_output_tokens: Option<u32>,
        default_temperature: Option<f32>,
        #[serde(default)]
        extra_beta_headers: Vec<String>,
        #[serde(default)]
        mode: AnthropicModelMode,
    },
}

impl Model {
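    /// Resolves a [`Model`] from a model ID by prefix-matching against the
    /// known Claude model families, returning an error for unrecognized IDs.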
    pub fn from_id(id: &str) -> Result<Self> {
        if id.starts_with("claude-3-5-sonnet") {
            Ok(Self::Claude3_5Sonnet)
        } else if id.starts_with("claude-3-7-sonnet-thinking") {
            Ok(Self::Claude3_7SonnetThinking)
        } else if id.starts_with("claude-3-7-sonnet") {
            Ok(Self::Claude3_7Sonnet)
        } else if id.starts_with("claude-3-5-haiku") {
            Ok(Self::Claude3_5Haiku)
        } else if id.starts_with("claude-3-opus") {
            Ok(Self::Claude3Opus)
        } else if id.starts_with("claude-3-sonnet") {
            Ok(Self::Claude3Sonnet)
        } else if id.starts_with("claude-3-haiku") {
            Ok(Self::Claude3Haiku)
        } else {
            Err(anyhow!("invalid model id: {id}"))
        }
    }

    pub fn id(&self) -> &str {
        match self {
            Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
            Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
            Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
            Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
            Model::Claude3Opus => "claude-3-opus-latest",
            Model::Claude3Sonnet => "claude-3-sonnet-20240229",
            Model::Claude3Haiku => "claude-3-haiku-20240307",
            Self::Custom { name, .. } => name,
        }
    }

    /// The id of the model that should be used for making API requests.
    pub fn request_id(&self) -> &str {
        match self {
            Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
            Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
            Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
            Model::Claude3Opus => "claude-3-opus-latest",
            Model::Claude3Sonnet => "claude-3-sonnet-20240229",
            Model::Claude3Haiku => "claude-3-haiku-20240307",
            Self::Custom { name, .. } => name,
        }
    }

    pub fn display_name(&self) -> &str {
        match self {
            Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
            Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
            Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
            Self::Claude3_5Haiku => "Claude 3.5 Haiku",
            Self::Claude3Opus => "Claude 3 Opus",
            Self::Claude3Sonnet => "Claude 3 Sonnet",
            Self::Claude3Haiku => "Claude 3 Haiku",
            Self::Custom {
                name, display_name, ..
            } => display_name.as_ref().unwrap_or(name),
        }
    }

    pub fn cache_configuration(&self) -> Option<AnthropicModelCacheConfiguration> {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration {
                min_total_token: 2_048,
                should_speculate: true,
                max_cache_anchors: 4,
            }),
            Self::Custom {
                cache_configuration,
                ..
            } => cache_configuration.clone(),
            _ => None,
        }
    }

    pub fn max_token_count(&self) -> usize {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => 200_000,
            Self::Custom { max_tokens, .. } => *max_tokens,
        }
    }

    pub fn max_output_tokens(&self) -> u32 {
        match self {
            Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096,
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3_5Haiku => 8_192,
            Self::Custom {
                max_output_tokens, ..
            } => max_output_tokens.unwrap_or(4_096),
        }
    }

    pub fn default_temperature(&self) -> f32 {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3_5Haiku
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => 1.0,
            Self::Custom {
                default_temperature,
                ..
            } => default_temperature.unwrap_or(1.0),
        }
    }

    pub fn mode(&self) -> AnthropicModelMode {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => AnthropicModelMode::Default,
            Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
                budget_tokens: Some(4_096),
            },
            Self::Custom { mode, .. } => mode.clone(),
        }
    }

    pub const DEFAULT_BETA_HEADERS: &[&str] = &["prompt-caching-2024-07-31"];

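    /// Builds the comma-separated value for the `Anthropic-Beta` request
    /// header, combining [`Self::DEFAULT_BETA_HEADERS`] with any
    /// model-specific or custom extra beta headers.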
    pub fn beta_headers(&self) -> String {
        let mut headers = Self::DEFAULT_BETA_HEADERS
            .into_iter()
            .map(|header| header.to_string())
            .collect::<Vec<_>>();

        match self {
            Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => {
                // Try beta token-efficient tool use (supported in Claude 3.7 Sonnet only)
                // https://docs.anthropic.com/en/docs/build-with-claude/tool-use/token-efficient-tool-use
                headers.push("token-efficient-tools-2025-02-19".to_string());
            }
            Self::Custom {
                extra_beta_headers, ..
            } => {
                headers.extend(
                    extra_beta_headers
                        .iter()
                        .filter(|header| !header.trim().is_empty())
                        .cloned(),
                );
            }
            _ => {}
        }

        headers.join(",")
    }

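    /// Returns the model ID to use for tool calls, honoring a custom model's
    /// `tool_override` when one is configured and falling back to
    /// [`Self::request_id`] otherwise.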
    pub fn tool_model_id(&self) -> &str {
        if let Self::Custom {
            tool_override: Some(tool_override),
            ..
        } = self
        {
            tool_override
        } else {
            self.request_id()
        }
    }
}

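/// Sends a non-streaming request to the Anthropic Messages API
/// (`/v1/messages`) and deserializes the successful response into a
/// [`Response`].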
pub async fn complete(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<Response, AnthropicError> {
    let uri = format!("{api_url}/v1/messages");
    let beta_headers = Model::from_id(&request.model)
        .map(|model| model.beta_headers())
        .unwrap_or_else(|_err| Model::DEFAULT_BETA_HEADERS.join(","));
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Anthropic-Version", "2023-06-01")
        .header("Anthropic-Beta", beta_headers)
        .header("X-Api-Key", api_key)
        .header("Content-Type", "application/json");

    let serialized_request =
        serde_json::to_string(&request).context("failed to serialize request")?;
    let request = request_builder
        .body(AsyncBody::from(serialized_request))
        .context("failed to construct request body")?;

    let mut response = client
        .send(request)
        .await
        .context("failed to send request to Anthropic")?;
    if response.status().is_success() {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;
        let response_message: Response =
            serde_json::from_slice(&body).context("failed to deserialize response body")?;
        Ok(response_message)
    } else {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;
        let body_str =
            std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;
        Err(AnthropicError::Other(anyhow!(
            "Failed to connect to API: {} {}",
            response.status(),
            body_str
        )))
    }
}

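/// Sends a streaming request to the Anthropic Messages API, returning a stream
/// of [`Event`]s and discarding any rate-limit information from the response
/// headers.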
pub async fn stream_completion(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<BoxStream<'static, Result<Event, AnthropicError>>, AnthropicError> {
    stream_completion_with_rate_limit_info(client, api_url, api_key, request)
        .await
        .map(|output| output.0)
}

/// An individual rate limit.
#[derive(Debug)]
pub struct RateLimit {
    pub limit: usize,
    pub remaining: usize,
    pub reset: DateTime<Utc>,
}

impl RateLimit {
    fn from_headers(resource: &str, headers: &HeaderMap<HeaderValue>) -> Result<Self> {
        let limit =
            get_header(&format!("anthropic-ratelimit-{resource}-limit"), headers)?.parse()?;
        let remaining = get_header(
            &format!("anthropic-ratelimit-{resource}-remaining"),
            headers,
        )?
        .parse()?;
        let reset = DateTime::parse_from_rfc3339(get_header(
            &format!("anthropic-ratelimit-{resource}-reset"),
            headers,
        )?)?
        .to_utc();

        Ok(Self {
            limit,
            remaining,
            reset,
        })
    }
}

/// <https://docs.anthropic.com/en/api/rate-limits#response-headers>
#[derive(Debug)]
pub struct RateLimitInfo {
    pub requests: Option<RateLimit>,
    pub tokens: Option<RateLimit>,
    pub input_tokens: Option<RateLimit>,
    pub output_tokens: Option<RateLimit>,
}

impl RateLimitInfo {
    fn from_headers(headers: &HeaderMap<HeaderValue>) -> Self {
        // Check if any rate limit headers exist
        let has_rate_limit_headers = headers
            .keys()
            .any(|k| k.as_str().starts_with("anthropic-ratelimit-"));

        if !has_rate_limit_headers {
            return Self {
                requests: None,
                tokens: None,
                input_tokens: None,
                output_tokens: None,
            };
        }

        Self {
            requests: RateLimit::from_headers("requests", headers).ok(),
            tokens: RateLimit::from_headers("tokens", headers).ok(),
            input_tokens: RateLimit::from_headers("input-tokens", headers).ok(),
            output_tokens: RateLimit::from_headers("output-tokens", headers).ok(),
        }
    }
}

fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> Result<&'a str, anyhow::Error> {
    Ok(headers
        .get(key)
        .ok_or_else(|| anyhow!("missing header `{key}`"))?
        .to_str()?)
}

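/// Sends a streaming request to the Anthropic Messages API, returning a stream
/// of server-sent [`Event`]s along with any [`RateLimitInfo`] parsed from the
/// response headers.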
pub async fn stream_completion_with_rate_limit_info(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<
    (
        BoxStream<'static, Result<Event, AnthropicError>>,
        Option<RateLimitInfo>,
    ),
    AnthropicError,
> {
    let request = StreamingRequest {
        base: request,
        stream: true,
    };
    let uri = format!("{api_url}/v1/messages");
    let beta_headers = Model::from_id(&request.base.model)
        .map(|model| model.beta_headers())
        .unwrap_or_else(|_err| Model::DEFAULT_BETA_HEADERS.join(","));
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Anthropic-Version", "2023-06-01")
        .header("Anthropic-Beta", beta_headers)
        .header("X-Api-Key", api_key)
        .header("Content-Type", "application/json");
    let serialized_request =
        serde_json::to_string(&request).context("failed to serialize request")?;
    let request = request_builder
        .body(AsyncBody::from(serialized_request))
        .context("failed to construct request body")?;

    let mut response = client
        .send(request)
        .await
        .context("failed to send request to Anthropic")?;
    if response.status().is_success() {
        let rate_limits = RateLimitInfo::from_headers(response.headers());
        let reader = BufReader::new(response.into_body());
        let stream = reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        let line = line.strip_prefix("data: ")?;
                        match serde_json::from_str(line) {
                            Ok(response) => Some(Ok(response)),
                            Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
                        }
                    }
                    Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
                }
            })
            .boxed();
        Ok((stream, Some(rate_limits)))
    } else {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;

        let body_str =
            std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;

        match serde_json::from_str::<Event>(body_str) {
            Ok(Event::Error { error }) => Err(AnthropicError::ApiError(error)),
            Ok(_) => Err(AnthropicError::Other(anyhow!(
                "Unexpected success response while expecting an error: '{body_str}'",
            ))),
            Err(_) => Err(AnthropicError::Other(anyhow!(
                "Failed to connect to API: {} {}",
                response.status(),
                body_str,
            ))),
        }
    }
}

#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
#[serde(rename_all = "lowercase")]
pub enum CacheControlType {
    Ephemeral,
}

#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
pub struct CacheControl {
    #[serde(rename = "type")]
    pub cache_type: CacheControlType,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
    pub role: Role,
    pub content: Vec<RequestContent>,
}

#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum RequestContent {
    #[serde(rename = "text")]
    Text {
        text: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "image")]
    Image {
        source: ImageSource,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "tool_result")]
    ToolResult {
        tool_use_id: String,
        is_error: bool,
        content: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ResponseContent {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "thinking")]
    Thinking { thinking: String },
    #[serde(rename = "redacted_thinking")]
    RedactedThinking { data: String },
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ImageSource {
    #[serde(rename = "type")]
    pub source_type: String,
    pub media_type: String,
    pub data: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Tool {
    pub name: String,
    pub description: String,
    pub input_schema: serde_json::Value,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolChoice {
    Auto,
    Any,
    Tool { name: String },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum Thinking {
    Enabled { budget_tokens: Option<u32> },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum StringOrContents {
    String(String),
    Content(Vec<RequestContent>),
}

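/// The body of a request to the Anthropic Messages API.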
#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
    pub model: String,
    pub max_tokens: u32,
    pub messages: Vec<Message>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<Tool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking: Option<Thinking>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub system: Option<StringOrContents>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Metadata>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub stop_sequences: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_k: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
}

#[derive(Debug, Serialize, Deserialize)]
struct StreamingRequest {
    #[serde(flatten)]
    pub base: Request,
    pub stream: bool,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Metadata {
    pub user_id: Option<String>,
}

#[derive(Debug, Serialize, Deserialize, Default)]
pub struct Usage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub input_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub output_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_creation_input_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_read_input_tokens: Option<u32>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Response {
    pub id: String,
    #[serde(rename = "type")]
    pub response_type: String,
    pub role: Role,
    pub content: Vec<ResponseContent>,
    pub model: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stop_reason: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stop_sequence: Option<String>,
    pub usage: Usage,
}

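/// A server-sent event emitted by the Anthropic Messages API when streaming.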
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum Event {
    #[serde(rename = "message_start")]
    MessageStart { message: Response },
    #[serde(rename = "content_block_start")]
    ContentBlockStart {
        index: usize,
        content_block: ResponseContent,
    },
    #[serde(rename = "content_block_delta")]
    ContentBlockDelta { index: usize, delta: ContentDelta },
    #[serde(rename = "content_block_stop")]
    ContentBlockStop { index: usize },
    #[serde(rename = "message_delta")]
    MessageDelta { delta: MessageDelta, usage: Usage },
    #[serde(rename = "message_stop")]
    MessageStop,
    #[serde(rename = "ping")]
    Ping,
    #[serde(rename = "error")]
    Error { error: ApiError },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ContentDelta {
    #[serde(rename = "text_delta")]
    TextDelta { text: String },
    #[serde(rename = "thinking_delta")]
    ThinkingDelta { thinking: String },
    #[serde(rename = "signature_delta")]
    SignatureDelta { signature: String },
    #[serde(rename = "input_json_delta")]
    InputJsonDelta { partial_json: String },
}

#[derive(Debug, Serialize, Deserialize)]
pub struct MessageDelta {
    pub stop_reason: Option<String>,
    pub stop_sequence: Option<String>,
}

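/// An error produced while interacting with the Anthropic API, either an error
/// reported by the API itself or a transport/serialization failure.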
#[derive(Error, Debug)]
pub enum AnthropicError {
    #[error("an error occurred while interacting with the Anthropic API: {error_type}: {message}", error_type = .0.error_type, message = .0.message)]
    ApiError(ApiError),
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ApiError {
    #[serde(rename = "type")]
    pub error_type: String,
    pub message: String,
}

/// An Anthropic API error code.
/// <https://docs.anthropic.com/en/api/errors#http-errors>
#[derive(Debug, PartialEq, Eq, Clone, Copy, EnumString)]
#[strum(serialize_all = "snake_case")]
pub enum ApiErrorCode {
    /// 400 - `invalid_request_error`: There was an issue with the format or content of your request.
    InvalidRequestError,
    /// 401 - `authentication_error`: There's an issue with your API key.
    AuthenticationError,
    /// 403 - `permission_error`: Your API key does not have permission to use the specified resource.
    PermissionError,
    /// 404 - `not_found_error`: The requested resource was not found.
    NotFoundError,
    /// 413 - `request_too_large`: Request exceeds the maximum allowed number of bytes.
    RequestTooLarge,
    /// 429 - `rate_limit_error`: Your account has hit a rate limit.
    RateLimitError,
    /// 500 - `api_error`: An unexpected error has occurred internal to Anthropic's systems.
    ApiError,
    /// 529 - `overloaded_error`: Anthropic's API is temporarily overloaded.
    OverloadedError,
}

impl ApiError {
    pub fn code(&self) -> Option<ApiErrorCode> {
        ApiErrorCode::from_str(&self.error_type).ok()
    }

    pub fn is_rate_limit_error(&self) -> bool {
        matches!(self.error_type.as_str(), "rate_limit_error")
    }

    pub fn match_window_exceeded(&self) -> Option<usize> {
        let Some(ApiErrorCode::InvalidRequestError) = self.code() else {
            return None;
        };

        parse_prompt_too_long(&self.message)
    }
}

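/// Extracts the token count from a "prompt is too long" error message, e.g.
/// `"prompt is too long: 220000 tokens > 200000"` yields `Some(220000)`.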
pub fn parse_prompt_too_long(message: &str) -> Option<usize> {
    message
        .strip_prefix("prompt is too long: ")?
        .split_once(" tokens")?
        .0
        .parse::<usize>()
        .ok()
}

#[test]
fn test_match_window_exceeded() {
    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: 220000 tokens > 200000".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), Some(220_000));

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: 1234953 tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), Some(1234953));

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "not a prompt length error".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);

    let error = ApiError {
        error_type: "rate_limit_error".to_string(),
        message: "prompt is too long: 12345 tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: invalid tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);
}