use std::str::FromStr;

use anyhow::{Context as _, Result, anyhow};
use chrono::{DateTime, Utc};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use http_client::http::{HeaderMap, HeaderValue};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use strum::{EnumIter, EnumString};
use thiserror::Error;

pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com";

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub struct AnthropicModelCacheConfiguration {
    pub min_total_token: usize,
    pub should_speculate: bool,
    pub max_cache_anchors: usize,
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum AnthropicModelMode {
    #[default]
    Default,
    Thinking {
        budget_tokens: Option<u32>,
    },
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
    #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
    Claude3_5Sonnet,
    #[default]
    #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
    Claude3_7Sonnet,
    #[serde(
        rename = "claude-3-7-sonnet-thinking",
        alias = "claude-3-7-sonnet-thinking-latest"
    )]
    Claude3_7SonnetThinking,
    #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
    Claude3_5Haiku,
    #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
    Claude3Opus,
    #[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")]
    Claude3Sonnet,
    #[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-latest")]
    Claude3Haiku,
    #[serde(rename = "custom")]
    Custom {
        name: String,
        max_tokens: usize,
        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
        display_name: Option<String>,
        /// Override this model with a different Anthropic model for tool calls.
        tool_override: Option<String>,
        /// Indicates whether this custom model supports caching.
        cache_configuration: Option<AnthropicModelCacheConfiguration>,
        max_output_tokens: Option<u32>,
        default_temperature: Option<f32>,
        #[serde(default)]
        extra_beta_headers: Vec<String>,
        #[serde(default)]
        mode: AnthropicModelMode,
    },
}
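
// Sketch test (added for illustration): a custom model definition falls back
// to the documented defaults when its optional fields are left unset. The
// model name used here is hypothetical.
#[test]
fn test_custom_model_defaults() {
    let model = Model::Custom {
        name: "my-custom-model".into(),
        max_tokens: 100_000,
        display_name: None,
        tool_override: None,
        cache_configuration: None,
        max_output_tokens: None,
        default_temperature: None,
        extra_beta_headers: Vec::new(),
        mode: AnthropicModelMode::Default,
    };
    assert_eq!(model.display_name(), "my-custom-model");
    assert_eq!(model.max_token_count(), 100_000);
    assert_eq!(model.max_output_tokens(), 4_096);
    assert_eq!(model.default_temperature(), 1.0);
    assert_eq!(model.tool_model_id(), "my-custom-model");
    assert_eq!(model.beta_headers(), Model::DEFAULT_BETA_HEADERS.join(","));
}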

impl Model {
    pub fn default_fast() -> Self {
        Self::Claude3_5Haiku
    }

    pub fn from_id(id: &str) -> Result<Self> {
        if id.starts_with("claude-3-5-sonnet") {
            Ok(Self::Claude3_5Sonnet)
        } else if id.starts_with("claude-3-7-sonnet-thinking") {
            Ok(Self::Claude3_7SonnetThinking)
        } else if id.starts_with("claude-3-7-sonnet") {
            Ok(Self::Claude3_7Sonnet)
        } else if id.starts_with("claude-3-5-haiku") {
            Ok(Self::Claude3_5Haiku)
        } else if id.starts_with("claude-3-opus") {
            Ok(Self::Claude3Opus)
        } else if id.starts_with("claude-3-sonnet") {
            Ok(Self::Claude3Sonnet)
        } else if id.starts_with("claude-3-haiku") {
            Ok(Self::Claude3Haiku)
        } else {
            Err(anyhow!("invalid model id {id}"))
        }
    }

    pub fn id(&self) -> &str {
        match self {
            Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
            Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
            Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
            Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
            Model::Claude3Opus => "claude-3-opus-latest",
            Model::Claude3Sonnet => "claude-3-sonnet-20240229",
            Model::Claude3Haiku => "claude-3-haiku-20240307",
            Self::Custom { name, .. } => name,
        }
    }

    /// The id of the model that should be used for making API requests.
    pub fn request_id(&self) -> &str {
        match self {
            Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
            Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
            Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
            Model::Claude3Opus => "claude-3-opus-latest",
            Model::Claude3Sonnet => "claude-3-sonnet-20240229",
            Model::Claude3Haiku => "claude-3-haiku-20240307",
            Self::Custom { name, .. } => name,
        }
    }

    pub fn display_name(&self) -> &str {
        match self {
            Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
            Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
            Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
            Self::Claude3_5Haiku => "Claude 3.5 Haiku",
            Self::Claude3Opus => "Claude 3 Opus",
            Self::Claude3Sonnet => "Claude 3 Sonnet",
            Self::Claude3Haiku => "Claude 3 Haiku",
            Self::Custom {
                name, display_name, ..
            } => display_name.as_ref().unwrap_or(name),
        }
    }

    pub fn cache_configuration(&self) -> Option<AnthropicModelCacheConfiguration> {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration {
                min_total_token: 2_048,
                should_speculate: true,
                max_cache_anchors: 4,
            }),
            Self::Custom {
                cache_configuration,
                ..
            } => cache_configuration.clone(),
            _ => None,
        }
    }

    pub fn max_token_count(&self) -> usize {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => 200_000,
            Self::Custom { max_tokens, .. } => *max_tokens,
        }
    }

    pub fn max_output_tokens(&self) -> u32 {
        match self {
            Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096,
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3_5Haiku => 8_192,
            Self::Custom {
                max_output_tokens, ..
            } => max_output_tokens.unwrap_or(4_096),
        }
    }

    pub fn default_temperature(&self) -> f32 {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3_5Haiku
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => 1.0,
            Self::Custom {
                default_temperature,
                ..
            } => default_temperature.unwrap_or(1.0),
        }
    }

    pub fn mode(&self) -> AnthropicModelMode {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => AnthropicModelMode::Default,
            Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
                budget_tokens: Some(4_096),
            },
            Self::Custom { mode, .. } => mode.clone(),
        }
    }

    pub const DEFAULT_BETA_HEADERS: &[&str] = &["prompt-caching-2024-07-31"];

    pub fn beta_headers(&self) -> String {
        let mut headers = Self::DEFAULT_BETA_HEADERS
            .into_iter()
            .map(|header| header.to_string())
            .collect::<Vec<_>>();

        match self {
            Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => {
                // Try beta token-efficient tool use (supported in Claude 3.7 Sonnet only)
                // https://docs.anthropic.com/en/docs/build-with-claude/tool-use/token-efficient-tool-use
                headers.push("token-efficient-tools-2025-02-19".to_string());
            }
            Self::Custom {
                extra_beta_headers, ..
            } => {
                headers.extend(
                    extra_beta_headers
                        .iter()
                        .filter(|header| !header.trim().is_empty())
                        .cloned(),
                );
            }
            _ => {}
        }

        headers.join(",")
    }

    pub fn tool_model_id(&self) -> &str {
        if let Self::Custom {
            tool_override: Some(tool_override),
            ..
        } = self
        {
            tool_override
        } else {
            self.request_id()
        }
    }
}
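
// Sketch test (added for illustration): the thinking variant shares its
// request id with the base Claude 3.7 Sonnet model and opts into the
// token-efficient tools beta, while unknown ids are rejected by `from_id`.
#[test]
fn test_model_id_resolution() {
    let model = Model::from_id("claude-3-7-sonnet-thinking-latest").unwrap();
    assert_eq!(model, Model::Claude3_7SonnetThinking);
    assert_eq!(model.id(), "claude-3-7-sonnet-thinking-latest");
    assert_eq!(model.request_id(), "claude-3-7-sonnet-latest");
    assert_eq!(
        model.beta_headers(),
        "prompt-caching-2024-07-31,token-efficient-tools-2025-02-19"
    );
    assert!(Model::from_id("not-a-claude-model").is_err());
}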
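
/// Usage sketch (illustrative; the `ask` helper below is hypothetical and the
/// caller supplies any [`HttpClient`] implementation plus an API key):
///
/// ```ignore
/// async fn ask(client: &dyn HttpClient, api_key: &str) -> Result<Response, AnthropicError> {
///     let request = Request {
///         model: Model::default().request_id().into(),
///         max_tokens: 1024,
///         messages: vec![Message {
///             role: Role::User,
///             content: vec![RequestContent::Text {
///                 text: "Hello, Claude!".into(),
///                 cache_control: None,
///             }],
///         }],
///         tools: Vec::new(),
///         thinking: None,
///         tool_choice: None,
///         system: None,
///         metadata: None,
///         stop_sequences: Vec::new(),
///         temperature: None,
///         top_k: None,
///         top_p: None,
///     };
///     complete(client, ANTHROPIC_API_URL, api_key, request).await
/// }
/// ```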
pub async fn complete(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<Response, AnthropicError> {
    let uri = format!("{api_url}/v1/messages");
    let beta_headers = Model::from_id(&request.model)
        .map(|model| model.beta_headers())
        .unwrap_or_else(|_err| Model::DEFAULT_BETA_HEADERS.join(","));
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Anthropic-Version", "2023-06-01")
        .header("Anthropic-Beta", beta_headers)
        .header("X-Api-Key", api_key)
        .header("Content-Type", "application/json");

    let serialized_request =
        serde_json::to_string(&request).context("failed to serialize request")?;
    let request = request_builder
        .body(AsyncBody::from(serialized_request))
        .context("failed to construct request body")?;

    let mut response = client
        .send(request)
        .await
        .context("failed to send request to Anthropic")?;
    if response.status().is_success() {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;
        let response_message: Response =
            serde_json::from_slice(&body).context("failed to deserialize response body")?;
        Ok(response_message)
    } else {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;
        let body_str =
            std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;
        Err(AnthropicError::Other(anyhow!(
            "Failed to connect to API: {} {}",
            response.status(),
            body_str
        )))
    }
}
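
/// Usage sketch (illustrative; `client` and `request` are built as in
/// [`complete`], and `futures::StreamExt` must be in scope for `.next()`):
///
/// ```ignore
/// use futures::StreamExt as _;
///
/// let mut events = stream_completion(client, ANTHROPIC_API_URL, api_key, request).await?;
/// while let Some(event) = events.next().await {
///     match event? {
///         Event::ContentBlockDelta {
///             delta: ContentDelta::TextDelta { text },
///             ..
///         } => print!("{text}"),
///         Event::MessageStop => break,
///         _ => {}
///     }
/// }
/// ```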
pub async fn stream_completion(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<BoxStream<'static, Result<Event, AnthropicError>>, AnthropicError> {
    stream_completion_with_rate_limit_info(client, api_url, api_key, request)
        .await
        .map(|output| output.0)
}

/// An individual rate limit.
#[derive(Debug)]
pub struct RateLimit {
    pub limit: usize,
    pub remaining: usize,
    pub reset: DateTime<Utc>,
}

impl RateLimit {
    fn from_headers(resource: &str, headers: &HeaderMap<HeaderValue>) -> Result<Self> {
        let limit =
            get_header(&format!("anthropic-ratelimit-{resource}-limit"), headers)?.parse()?;
        let remaining = get_header(
            &format!("anthropic-ratelimit-{resource}-remaining"),
            headers,
        )?
        .parse()?;
        let reset = DateTime::parse_from_rfc3339(get_header(
            &format!("anthropic-ratelimit-{resource}-reset"),
            headers,
        )?)?
        .to_utc();

        Ok(Self {
            limit,
            remaining,
            reset,
        })
    }
}

/// <https://docs.anthropic.com/en/api/rate-limits#response-headers>
#[derive(Debug)]
pub struct RateLimitInfo {
    pub requests: Option<RateLimit>,
    pub tokens: Option<RateLimit>,
    pub input_tokens: Option<RateLimit>,
    pub output_tokens: Option<RateLimit>,
}

impl RateLimitInfo {
    fn from_headers(headers: &HeaderMap<HeaderValue>) -> Self {
        // Check if any rate limit headers exist
        let has_rate_limit_headers = headers
            .keys()
            .any(|k| k.as_str().starts_with("anthropic-ratelimit-"));

        if !has_rate_limit_headers {
            return Self {
                requests: None,
                tokens: None,
                input_tokens: None,
                output_tokens: None,
            };
        }

        Self {
            requests: RateLimit::from_headers("requests", headers).ok(),
            tokens: RateLimit::from_headers("tokens", headers).ok(),
            input_tokens: RateLimit::from_headers("input-tokens", headers).ok(),
            output_tokens: RateLimit::from_headers("output-tokens", headers).ok(),
        }
    }
}

fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> Result<&'a str, anyhow::Error> {
    Ok(headers
        .get(key)
        .ok_or_else(|| anyhow!("missing header `{key}`"))?
        .to_str()?)
}
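
// Sketch test (added for illustration): parses the `anthropic-ratelimit-*`
// response headers documented at
// <https://docs.anthropic.com/en/api/rate-limits#response-headers>.
// The header values below are made up.
#[test]
fn test_rate_limit_info_from_headers() {
    let mut headers = HeaderMap::new();
    headers.insert(
        "anthropic-ratelimit-requests-limit",
        HeaderValue::from_static("100"),
    );
    headers.insert(
        "anthropic-ratelimit-requests-remaining",
        HeaderValue::from_static("99"),
    );
    headers.insert(
        "anthropic-ratelimit-requests-reset",
        HeaderValue::from_static("2024-01-01T00:00:00Z"),
    );

    let info = RateLimitInfo::from_headers(&headers);
    let requests = info.requests.expect("requests rate limit should be parsed");
    assert_eq!(requests.limit, 100);
    assert_eq!(requests.remaining, 99);
    assert_eq!(
        requests.reset,
        "2024-01-01T00:00:00Z".parse::<DateTime<Utc>>().unwrap()
    );
    // Token limits were absent from the headers, so they remain `None`.
    assert!(info.tokens.is_none());
    assert!(info.input_tokens.is_none());
    assert!(info.output_tokens.is_none());
}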

pub async fn stream_completion_with_rate_limit_info(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<
    (
        BoxStream<'static, Result<Event, AnthropicError>>,
        Option<RateLimitInfo>,
    ),
    AnthropicError,
> {
    let request = StreamingRequest {
        base: request,
        stream: true,
    };
    let uri = format!("{api_url}/v1/messages");
    let beta_headers = Model::from_id(&request.base.model)
        .map(|model| model.beta_headers())
        .unwrap_or_else(|_err| Model::DEFAULT_BETA_HEADERS.join(","));
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Anthropic-Version", "2023-06-01")
        .header("Anthropic-Beta", beta_headers)
        .header("X-Api-Key", api_key)
        .header("Content-Type", "application/json");
    let serialized_request =
        serde_json::to_string(&request).context("failed to serialize request")?;
    let request = request_builder
        .body(AsyncBody::from(serialized_request))
        .context("failed to construct request body")?;

    let mut response = client
        .send(request)
        .await
        .context("failed to send request to Anthropic")?;
    if response.status().is_success() {
        let rate_limits = RateLimitInfo::from_headers(response.headers());
        let reader = BufReader::new(response.into_body());
        let stream = reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        let line = line.strip_prefix("data: ")?;
                        match serde_json::from_str(line) {
                            Ok(response) => Some(Ok(response)),
                            Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
                        }
                    }
                    Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
                }
            })
            .boxed();
        Ok((stream, Some(rate_limits)))
    } else {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;

        let body_str =
            std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;

        match serde_json::from_str::<Event>(body_str) {
            Ok(Event::Error { error }) => Err(AnthropicError::ApiError(error)),
            Ok(_) => Err(AnthropicError::Other(anyhow!(
                "Unexpected success response while expecting an error: '{body_str}'",
            ))),
            Err(_) => Err(AnthropicError::Other(anyhow!(
                "Failed to connect to API: {} {}",
                response.status(),
                body_str,
            ))),
        }
    }
}

#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
#[serde(rename_all = "lowercase")]
pub enum CacheControlType {
    Ephemeral,
}

#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
pub struct CacheControl {
    #[serde(rename = "type")]
    pub cache_type: CacheControlType,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
    pub role: Role,
    pub content: Vec<RequestContent>,
}

#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum RequestContent {
    #[serde(rename = "text")]
    Text {
        text: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "thinking")]
    Thinking {
        thinking: String,
        signature: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "redacted_thinking")]
    RedactedThinking { data: String },
    #[serde(rename = "image")]
    Image {
        source: ImageSource,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "tool_result")]
    ToolResult {
        tool_use_id: String,
        is_error: bool,
        content: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ResponseContent {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "thinking")]
    Thinking { thinking: String },
    #[serde(rename = "redacted_thinking")]
    RedactedThinking { data: String },
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ImageSource {
    #[serde(rename = "type")]
    pub source_type: String,
    pub media_type: String,
    pub data: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Tool {
    pub name: String,
    pub description: String,
    pub input_schema: serde_json::Value,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolChoice {
    Auto,
    Any,
    Tool { name: String },
    None,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum Thinking {
    Enabled { budget_tokens: Option<u32> },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum StringOrContents {
    String(String),
    Content(Vec<RequestContent>),
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
    pub model: String,
    pub max_tokens: u32,
    pub messages: Vec<Message>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<Tool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking: Option<Thinking>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub system: Option<StringOrContents>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Metadata>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub stop_sequences: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_k: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
}

#[derive(Debug, Serialize, Deserialize)]
struct StreamingRequest {
    #[serde(flatten)]
    pub base: Request,
    pub stream: bool,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Metadata {
    pub user_id: Option<String>,
}

#[derive(Debug, Serialize, Deserialize, Default)]
pub struct Usage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub input_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub output_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_creation_input_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_read_input_tokens: Option<u32>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Response {
    pub id: String,
    #[serde(rename = "type")]
    pub response_type: String,
    pub role: Role,
    pub content: Vec<ResponseContent>,
    pub model: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stop_reason: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stop_sequence: Option<String>,
    pub usage: Usage,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum Event {
    #[serde(rename = "message_start")]
    MessageStart { message: Response },
    #[serde(rename = "content_block_start")]
    ContentBlockStart {
        index: usize,
        content_block: ResponseContent,
    },
    #[serde(rename = "content_block_delta")]
    ContentBlockDelta { index: usize, delta: ContentDelta },
    #[serde(rename = "content_block_stop")]
    ContentBlockStop { index: usize },
    #[serde(rename = "message_delta")]
    MessageDelta { delta: MessageDelta, usage: Usage },
    #[serde(rename = "message_stop")]
    MessageStop,
    #[serde(rename = "ping")]
    Ping,
    #[serde(rename = "error")]
    Error { error: ApiError },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ContentDelta {
    #[serde(rename = "text_delta")]
    TextDelta { text: String },
    #[serde(rename = "thinking_delta")]
    ThinkingDelta { thinking: String },
    #[serde(rename = "signature_delta")]
    SignatureDelta { signature: String },
    #[serde(rename = "input_json_delta")]
    InputJsonDelta { partial_json: String },
}

#[derive(Debug, Serialize, Deserialize)]
pub struct MessageDelta {
    pub stop_reason: Option<String>,
    pub stop_sequence: Option<String>,
}

#[derive(Error, Debug)]
pub enum AnthropicError {
    #[error("an error occurred while interacting with the Anthropic API: {error_type}: {message}", error_type = .0.error_type, message = .0.message)]
    ApiError(ApiError),
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ApiError {
    #[serde(rename = "type")]
    pub error_type: String,
    pub message: String,
}

/// An Anthropic API error code.
/// <https://docs.anthropic.com/en/api/errors#http-errors>
#[derive(Debug, PartialEq, Eq, Clone, Copy, EnumString)]
#[strum(serialize_all = "snake_case")]
pub enum ApiErrorCode {
    /// 400 - `invalid_request_error`: There was an issue with the format or content of your request.
    InvalidRequestError,
    /// 401 - `authentication_error`: There's an issue with your API key.
    AuthenticationError,
    /// 403 - `permission_error`: Your API key does not have permission to use the specified resource.
    PermissionError,
    /// 404 - `not_found_error`: The requested resource was not found.
    NotFoundError,
    /// 413 - `request_too_large`: Request exceeds the maximum allowed number of bytes.
    RequestTooLarge,
    /// 429 - `rate_limit_error`: Your account has hit a rate limit.
    RateLimitError,
    /// 500 - `api_error`: An unexpected error has occurred internal to Anthropic's systems.
    ApiError,
    /// 529 - `overloaded_error`: Anthropic's API is temporarily overloaded.
    OverloadedError,
}

impl ApiError {
    pub fn code(&self) -> Option<ApiErrorCode> {
        ApiErrorCode::from_str(&self.error_type).ok()
    }

    pub fn is_rate_limit_error(&self) -> bool {
        matches!(self.error_type.as_str(), "rate_limit_error")
    }

    pub fn match_window_exceeded(&self) -> Option<usize> {
        let Some(ApiErrorCode::InvalidRequestError) = self.code() else {
            return None;
        };

        parse_prompt_too_long(&self.message)
    }
}

pub fn parse_prompt_too_long(message: &str) -> Option<usize> {
    message
        .strip_prefix("prompt is too long: ")?
        .split_once(" tokens")?
        .0
        .parse::<usize>()
        .ok()
}

#[test]
fn test_match_window_exceeded() {
    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: 220000 tokens > 200000".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), Some(220_000));

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: 1234953 tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), Some(1234953));

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "not a prompt length error".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);

    let error = ApiError {
        error_type: "rate_limit_error".to_string(),
        message: "prompt is too long: 12345 tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: invalid tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);
}