use std::str::FromStr;

use anyhow::{Context as _, Result, anyhow};
use chrono::{DateTime, Utc};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use http_client::http::{HeaderMap, HeaderValue};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use strum::{EnumIter, EnumString};
use thiserror::Error;

pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com";

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub struct AnthropicModelCacheConfiguration {
    pub min_total_token: usize,
    pub should_speculate: bool,
    pub max_cache_anchors: usize,
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum AnthropicModelMode {
    #[default]
    Default,
    Thinking {
        budget_tokens: Option<u32>,
    },
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
    #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
    Claude3_5Sonnet,
    #[default]
    #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
    Claude3_7Sonnet,
    #[serde(
        rename = "claude-3-7-sonnet-thinking",
        alias = "claude-3-7-sonnet-thinking-latest"
    )]
    Claude3_7SonnetThinking,
    #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
    Claude3_5Haiku,
    #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
    Claude3Opus,
    #[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")]
    Claude3Sonnet,
    #[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-latest")]
    Claude3Haiku,
    #[serde(rename = "custom")]
    Custom {
        name: String,
        max_tokens: usize,
        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
        display_name: Option<String>,
        /// Override this model with a different Anthropic model for tool calls.
        tool_override: Option<String>,
        /// The cache configuration to use if this custom model supports prompt caching.
        cache_configuration: Option<AnthropicModelCacheConfiguration>,
        max_output_tokens: Option<u32>,
        default_temperature: Option<f32>,
        #[serde(default)]
        extra_beta_headers: Vec<String>,
        #[serde(default)]
        mode: AnthropicModelMode,
    },
}

impl Model {
    pub fn default_fast() -> Self {
        Self::Claude3_5Haiku
    }

    pub fn from_id(id: &str) -> Result<Self> {
        if id.starts_with("claude-3-5-sonnet") {
            Ok(Self::Claude3_5Sonnet)
        } else if id.starts_with("claude-3-7-sonnet-thinking") {
            Ok(Self::Claude3_7SonnetThinking)
        } else if id.starts_with("claude-3-7-sonnet") {
            Ok(Self::Claude3_7Sonnet)
        } else if id.starts_with("claude-3-5-haiku") {
            Ok(Self::Claude3_5Haiku)
        } else if id.starts_with("claude-3-opus") {
            Ok(Self::Claude3Opus)
        } else if id.starts_with("claude-3-sonnet") {
            Ok(Self::Claude3Sonnet)
        } else if id.starts_with("claude-3-haiku") {
            Ok(Self::Claude3Haiku)
        } else {
            Err(anyhow!("invalid model id {id}"))
        }
    }

    pub fn id(&self) -> &str {
        match self {
            Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
            Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
            Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
            Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
            Model::Claude3Opus => "claude-3-opus-latest",
            Model::Claude3Sonnet => "claude-3-sonnet-20240229",
            Model::Claude3Haiku => "claude-3-haiku-20240307",
            Self::Custom { name, .. } => name,
        }
    }

    /// The id of the model that should be used for making API requests
    pub fn request_id(&self) -> &str {
        match self {
            Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
            Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
            Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
            Model::Claude3Opus => "claude-3-opus-latest",
            Model::Claude3Sonnet => "claude-3-sonnet-20240229",
            Model::Claude3Haiku => "claude-3-haiku-20240307",
            Self::Custom { name, .. } => name,
        }
    }

    pub fn display_name(&self) -> &str {
        match self {
            Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
            Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
            Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
            Self::Claude3_5Haiku => "Claude 3.5 Haiku",
            Self::Claude3Opus => "Claude 3 Opus",
            Self::Claude3Sonnet => "Claude 3 Sonnet",
            Self::Claude3Haiku => "Claude 3 Haiku",
            Self::Custom {
                name, display_name, ..
            } => display_name.as_ref().unwrap_or(name),
        }
    }

    pub fn cache_configuration(&self) -> Option<AnthropicModelCacheConfiguration> {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration {
                min_total_token: 2_048,
                should_speculate: true,
                max_cache_anchors: 4,
            }),
            Self::Custom {
                cache_configuration,
                ..
            } => cache_configuration.clone(),
            _ => None,
        }
    }

    pub fn max_token_count(&self) -> usize {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => 200_000,
            Self::Custom { max_tokens, .. } => *max_tokens,
        }
    }

    pub fn max_output_tokens(&self) -> u32 {
        match self {
            Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096,
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3_5Haiku => 8_192,
            Self::Custom {
                max_output_tokens, ..
            } => max_output_tokens.unwrap_or(4_096),
        }
    }

    pub fn default_temperature(&self) -> f32 {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3_5Haiku
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => 1.0,
            Self::Custom {
                default_temperature,
                ..
            } => default_temperature.unwrap_or(1.0),
        }
    }

    pub fn mode(&self) -> AnthropicModelMode {
        match self {
            Self::Claude3_5Sonnet
            | Self::Claude3_7Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3Haiku => AnthropicModelMode::Default,
            Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
                budget_tokens: Some(4_096),
            },
            Self::Custom { mode, .. } => mode.clone(),
        }
    }

    pub const DEFAULT_BETA_HEADERS: &[&str] = &["prompt-caching-2024-07-31"];

    pub fn beta_headers(&self) -> String {
        let mut headers = Self::DEFAULT_BETA_HEADERS
            .iter()
            .map(|header| header.to_string())
            .collect::<Vec<_>>();

        match self {
            Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => {
                // Try beta token-efficient tool use (supported in Claude 3.7 Sonnet only)
                // https://docs.anthropic.com/en/docs/build-with-claude/tool-use/token-efficient-tool-use
                headers.push("token-efficient-tools-2025-02-19".to_string());
            }
            Self::Custom {
                extra_beta_headers, ..
            } => {
                headers.extend(
                    extra_beta_headers
                        .iter()
                        .filter(|header| !header.trim().is_empty())
                        .cloned(),
                );
            }
            _ => {}
        }

        headers.join(",")
    }

    pub fn tool_model_id(&self) -> &str {
        if let Self::Custom {
            tool_override: Some(tool_override),
            ..
        } = self
        {
            tool_override
        } else {
            self.request_id()
        }
    }
}

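/// Sends a non-streaming request to the Anthropic Messages API (`POST {api_url}/v1/messages`)
/// and returns the parsed [`Response`].
///
/// Illustrative usage sketch (not compiled as a doctest); `client`, `api_key`, and
/// `request` are placeholders for whatever the caller already has in hand:
///
/// ```ignore
/// let response = complete(client.as_ref(), ANTHROPIC_API_URL, &api_key, request).await?;
/// for content in &response.content {
///     if let ResponseContent::Text { text } = content {
///         println!("{text}");
///     }
/// }
/// ```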
pub async fn complete(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<Response, AnthropicError> {
    let uri = format!("{api_url}/v1/messages");
    let beta_headers = Model::from_id(&request.model)
        .map(|model| model.beta_headers())
        .unwrap_or_else(|_err| Model::DEFAULT_BETA_HEADERS.join(","));
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Anthropic-Version", "2023-06-01")
        .header("Anthropic-Beta", beta_headers)
        .header("X-Api-Key", api_key)
        .header("Content-Type", "application/json");

    let serialized_request =
        serde_json::to_string(&request).context("failed to serialize request")?;
    let request = request_builder
        .body(AsyncBody::from(serialized_request))
        .context("failed to construct request body")?;

    let mut response = client
        .send(request)
        .await
        .context("failed to send request to Anthropic")?;
    if response.status().is_success() {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;
        let response_message: Response =
            serde_json::from_slice(&body).context("failed to deserialize response body")?;
        Ok(response_message)
    } else {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;
        let body_str =
            std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;
        Err(AnthropicError::Other(anyhow!(
            "Failed to connect to API: {} {}",
            response.status(),
            body_str
        )))
    }
}

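/// Like [`complete`], but requests a server-sent-event stream and yields each parsed
/// [`Event`] as it arrives. Rate-limit headers are discarded; use
/// [`stream_completion_with_rate_limit_info`] if you need them.
///
/// Illustrative usage sketch (not compiled as a doctest); `client`, `api_key`, and
/// `request` are placeholders:
///
/// ```ignore
/// let mut events = stream_completion(client.as_ref(), ANTHROPIC_API_URL, &api_key, request).await?;
/// while let Some(event) = events.next().await {
///     match event? {
///         Event::ContentBlockDelta { delta: ContentDelta::TextDelta { text }, .. } => print!("{text}"),
///         Event::MessageStop => break,
///         _ => {}
///     }
/// }
/// ```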
pub async fn stream_completion(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<BoxStream<'static, Result<Event, AnthropicError>>, AnthropicError> {
    stream_completion_with_rate_limit_info(client, api_url, api_key, request)
        .await
        .map(|output| output.0)
}

/// An individual rate limit.
#[derive(Debug)]
pub struct RateLimit {
    pub limit: usize,
    pub remaining: usize,
    pub reset: DateTime<Utc>,
}

impl RateLimit {
    fn from_headers(resource: &str, headers: &HeaderMap<HeaderValue>) -> Result<Self> {
        let limit =
            get_header(&format!("anthropic-ratelimit-{resource}-limit"), headers)?.parse()?;
        let remaining = get_header(
            &format!("anthropic-ratelimit-{resource}-remaining"),
            headers,
        )?
        .parse()?;
        let reset = DateTime::parse_from_rfc3339(get_header(
            &format!("anthropic-ratelimit-{resource}-reset"),
            headers,
        )?)?
        .to_utc();

        Ok(Self {
            limit,
            remaining,
            reset,
        })
    }
}
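
// Illustrative check of the header shape `RateLimit::from_headers` expects; the values
// below are arbitrary examples rather than real API responses.
#[test]
fn test_rate_limit_from_headers() {
    let mut headers = HeaderMap::new();
    headers.insert(
        "anthropic-ratelimit-requests-limit",
        HeaderValue::from_static("100"),
    );
    headers.insert(
        "anthropic-ratelimit-requests-remaining",
        HeaderValue::from_static("99"),
    );
    headers.insert(
        "anthropic-ratelimit-requests-reset",
        HeaderValue::from_static("2024-01-01T00:00:00Z"),
    );

    let rate_limit = RateLimit::from_headers("requests", &headers).unwrap();
    assert_eq!(rate_limit.limit, 100);
    assert_eq!(rate_limit.remaining, 99);
    assert_eq!(
        rate_limit.reset,
        "2024-01-01T00:00:00Z".parse::<DateTime<Utc>>().unwrap()
    );

    // A resource with no matching headers is an error; `RateLimitInfo::from_headers`
    // maps that to `None`.
    assert!(RateLimit::from_headers("tokens", &headers).is_err());
}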

/// <https://docs.anthropic.com/en/api/rate-limits#response-headers>
#[derive(Debug)]
pub struct RateLimitInfo {
    pub requests: Option<RateLimit>,
    pub tokens: Option<RateLimit>,
    pub input_tokens: Option<RateLimit>,
    pub output_tokens: Option<RateLimit>,
}

impl RateLimitInfo {
    fn from_headers(headers: &HeaderMap<HeaderValue>) -> Self {
        // Check if any rate limit headers exist
        let has_rate_limit_headers = headers
            .keys()
            .any(|k| k.as_str().starts_with("anthropic-ratelimit-"));

        if !has_rate_limit_headers {
            return Self {
                requests: None,
                tokens: None,
                input_tokens: None,
                output_tokens: None,
            };
        }

        Self {
            requests: RateLimit::from_headers("requests", headers).ok(),
            tokens: RateLimit::from_headers("tokens", headers).ok(),
            input_tokens: RateLimit::from_headers("input-tokens", headers).ok(),
            output_tokens: RateLimit::from_headers("output-tokens", headers).ok(),
        }
    }
}

fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> Result<&'a str, anyhow::Error> {
    Ok(headers
        .get(key)
        .ok_or_else(|| anyhow!("missing header `{key}`"))?
        .to_str()?)
}

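/// Like [`stream_completion`], but also returns the [`RateLimitInfo`] parsed from the
/// response headers of a successful request; each individual limit is `None` when the
/// corresponding `anthropic-ratelimit-*` headers are absent.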
pub async fn stream_completion_with_rate_limit_info(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<
    (
        BoxStream<'static, Result<Event, AnthropicError>>,
        Option<RateLimitInfo>,
    ),
    AnthropicError,
> {
    let request = StreamingRequest {
        base: request,
        stream: true,
    };
    let uri = format!("{api_url}/v1/messages");
    let beta_headers = Model::from_id(&request.base.model)
        .map(|model| model.beta_headers())
        .unwrap_or_else(|_err| Model::DEFAULT_BETA_HEADERS.join(","));
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Anthropic-Version", "2023-06-01")
        .header("Anthropic-Beta", beta_headers)
        .header("X-Api-Key", api_key)
        .header("Content-Type", "application/json");
    let serialized_request =
        serde_json::to_string(&request).context("failed to serialize request")?;
    let request = request_builder
        .body(AsyncBody::from(serialized_request))
        .context("failed to construct request body")?;

    let mut response = client
        .send(request)
        .await
        .context("failed to send request to Anthropic")?;
    if response.status().is_success() {
        let rate_limits = RateLimitInfo::from_headers(response.headers());
        let reader = BufReader::new(response.into_body());
        let stream = reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        let line = line.strip_prefix("data: ")?;
                        match serde_json::from_str(line) {
                            Ok(response) => Some(Ok(response)),
                            Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
                        }
                    }
                    Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
                }
            })
            .boxed();
        Ok((stream, Some(rate_limits)))
    } else {
        let mut body = Vec::new();
        response
            .body_mut()
            .read_to_end(&mut body)
            .await
            .context("failed to read response body")?;

        let body_str =
            std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;

        match serde_json::from_str::<Event>(body_str) {
            Ok(Event::Error { error }) => Err(AnthropicError::ApiError(error)),
            Ok(_) => Err(AnthropicError::Other(anyhow!(
                "Unexpected success response while expecting an error: '{body_str}'",
            ))),
            Err(_) => Err(AnthropicError::Other(anyhow!(
                "Failed to connect to API: {} {}",
                response.status(),
                body_str,
            ))),
        }
    }
}

#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
#[serde(rename_all = "lowercase")]
pub enum CacheControlType {
    Ephemeral,
}

#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
pub struct CacheControl {
    #[serde(rename = "type")]
    pub cache_type: CacheControlType,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Message {
    pub role: Role,
    pub content: Vec<RequestContent>,
}

#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum RequestContent {
    #[serde(rename = "text")]
    Text {
        text: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "thinking")]
    Thinking {
        thinking: String,
        signature: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "redacted_thinking")]
    RedactedThinking { data: String },
    #[serde(rename = "image")]
    Image {
        source: ImageSource,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
    #[serde(rename = "tool_result")]
    ToolResult {
        tool_use_id: String,
        is_error: bool,
        content: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<CacheControl>,
    },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ResponseContent {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "thinking")]
    Thinking { thinking: String },
    #[serde(rename = "redacted_thinking")]
    RedactedThinking { data: String },
    #[serde(rename = "tool_use")]
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ImageSource {
    #[serde(rename = "type")]
    pub source_type: String,
    pub media_type: String,
    pub data: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Tool {
    pub name: String,
    pub description: String,
    pub input_schema: serde_json::Value,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolChoice {
    Auto,
    Any,
    Tool { name: String },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum Thinking {
    Enabled { budget_tokens: Option<u32> },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum StringOrContents {
    String(String),
    Content(Vec<RequestContent>),
}

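/// The body of a Messages API request.
///
/// A minimal construction sketch (not compiled as a doctest); the field values are
/// placeholders, and optional fields are left unset so serde omits them:
///
/// ```ignore
/// let request = Request {
///     model: Model::Claude3_7Sonnet.request_id().into(),
///     max_tokens: 1024,
///     messages: vec![Message {
///         role: Role::User,
///         content: vec![RequestContent::Text {
///             text: "Hello!".into(),
///             cache_control: None,
///         }],
///     }],
///     tools: Vec::new(),
///     thinking: None,
///     tool_choice: None,
///     system: None,
///     metadata: None,
///     stop_sequences: Vec::new(),
///     temperature: None,
///     top_k: None,
///     top_p: None,
/// };
/// ```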
#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
    pub model: String,
    pub max_tokens: u32,
    pub messages: Vec<Message>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<Tool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub thinking: Option<Thinking>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub system: Option<StringOrContents>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Metadata>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub stop_sequences: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_k: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
}

#[derive(Debug, Serialize, Deserialize)]
struct StreamingRequest {
    #[serde(flatten)]
    pub base: Request,
    pub stream: bool,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Metadata {
    pub user_id: Option<String>,
}

#[derive(Debug, Serialize, Deserialize, Default)]
pub struct Usage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub input_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub output_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_creation_input_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache_read_input_tokens: Option<u32>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Response {
    pub id: String,
    #[serde(rename = "type")]
    pub response_type: String,
    pub role: Role,
    pub content: Vec<ResponseContent>,
    pub model: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stop_reason: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub stop_sequence: Option<String>,
    pub usage: Usage,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum Event {
    #[serde(rename = "message_start")]
    MessageStart { message: Response },
    #[serde(rename = "content_block_start")]
    ContentBlockStart {
        index: usize,
        content_block: ResponseContent,
    },
    #[serde(rename = "content_block_delta")]
    ContentBlockDelta { index: usize, delta: ContentDelta },
    #[serde(rename = "content_block_stop")]
    ContentBlockStop { index: usize },
    #[serde(rename = "message_delta")]
    MessageDelta { delta: MessageDelta, usage: Usage },
    #[serde(rename = "message_stop")]
    MessageStop,
    #[serde(rename = "ping")]
    Ping,
    #[serde(rename = "error")]
    Error { error: ApiError },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ContentDelta {
    #[serde(rename = "text_delta")]
    TextDelta { text: String },
    #[serde(rename = "thinking_delta")]
    ThinkingDelta { thinking: String },
    #[serde(rename = "signature_delta")]
    SignatureDelta { signature: String },
    #[serde(rename = "input_json_delta")]
    InputJsonDelta { partial_json: String },
}
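
// Illustrative check that a `content_block_delta` SSE payload deserializes into the
// expected variants; the JSON below is a hand-written example of the documented shape.
#[test]
fn test_deserialize_content_block_delta_event() {
    let json =
        r#"{"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hello"}}"#;
    match serde_json::from_str::<Event>(json).unwrap() {
        Event::ContentBlockDelta {
            index,
            delta: ContentDelta::TextDelta { text },
        } => {
            assert_eq!(index, 0);
            assert_eq!(text, "Hello");
        }
        other => panic!("unexpected event: {other:?}"),
    }
}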

#[derive(Debug, Serialize, Deserialize)]
pub struct MessageDelta {
    pub stop_reason: Option<String>,
    pub stop_sequence: Option<String>,
}

#[derive(Error, Debug)]
pub enum AnthropicError {
    #[error("an error occurred while interacting with the Anthropic API: {error_type}: {message}", error_type = .0.error_type, message = .0.message)]
    ApiError(ApiError),
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ApiError {
    #[serde(rename = "type")]
    pub error_type: String,
    pub message: String,
}

/// An Anthropic API error code.
/// <https://docs.anthropic.com/en/api/errors#http-errors>
#[derive(Debug, PartialEq, Eq, Clone, Copy, EnumString)]
#[strum(serialize_all = "snake_case")]
pub enum ApiErrorCode {
    /// 400 - `invalid_request_error`: There was an issue with the format or content of your request.
    InvalidRequestError,
    /// 401 - `authentication_error`: There's an issue with your API key.
    AuthenticationError,
    /// 403 - `permission_error`: Your API key does not have permission to use the specified resource.
    PermissionError,
    /// 404 - `not_found_error`: The requested resource was not found.
    NotFoundError,
    /// 413 - `request_too_large`: Request exceeds the maximum allowed number of bytes.
    RequestTooLarge,
    /// 429 - `rate_limit_error`: Your account has hit a rate limit.
    RateLimitError,
    /// 500 - `api_error`: An unexpected error has occurred internal to Anthropic's systems.
    ApiError,
    /// 529 - `overloaded_error`: Anthropic's API is temporarily overloaded.
    OverloadedError,
}

impl ApiError {
    pub fn code(&self) -> Option<ApiErrorCode> {
        ApiErrorCode::from_str(&self.error_type).ok()
    }

    pub fn is_rate_limit_error(&self) -> bool {
        matches!(self.error_type.as_str(), "rate_limit_error")
    }

    pub fn match_window_exceeded(&self) -> Option<usize> {
        let Some(ApiErrorCode::InvalidRequestError) = self.code() else {
            return None;
        };

        parse_prompt_too_long(&self.message)
    }
}

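/// Extracts the reported token count from an Anthropic "prompt is too long" error
/// message, e.g. `"prompt is too long: 220000 tokens > 200000"`.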
pub fn parse_prompt_too_long(message: &str) -> Option<usize> {
    message
        .strip_prefix("prompt is too long: ")?
        .split_once(" tokens")?
        .0
        .parse::<usize>()
        .ok()
}

#[test]
fn test_match_window_exceeded() {
    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: 220000 tokens > 200000".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), Some(220_000));

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: 1234953 tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), Some(1234953));

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "not a prompt length error".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);

    let error = ApiError {
        error_type: "rate_limit_error".to_string(),
        message: "prompt is too long: 12345 tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: invalid tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);
}
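
// Illustrative checks of `Model::from_id` prefix matching and the beta headers each
// model advertises; the dated model id is just an example string.
#[test]
fn test_model_from_id_and_beta_headers() {
    assert_eq!(
        Model::from_id("claude-3-5-sonnet-20241022").unwrap(),
        Model::Claude3_5Sonnet
    );
    assert_eq!(
        Model::from_id("claude-3-7-sonnet-thinking-latest").unwrap(),
        Model::Claude3_7SonnetThinking
    );
    assert!(Model::from_id("gpt-4o").is_err());

    assert_eq!(
        Model::Claude3_7Sonnet.beta_headers(),
        "prompt-caching-2024-07-31,token-efficient-tools-2025-02-19"
    );
    assert_eq!(
        Model::Claude3_5Haiku.beta_headers(),
        "prompt-caching-2024-07-31"
    );
}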