use anyhow::anyhow;
use serde::{Deserialize, Serialize};
use strum::EnumIter;

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum BedrockModelMode {
    #[default]
    Default,
    Thinking {
        budget_tokens: Option<u64>,
    },
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
    // Anthropic models
    #[default]
    #[serde(rename = "claude-3-5-sonnet-v2", alias = "claude-3-5-sonnet-latest")]
    Claude3_5SonnetV2,
    #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
    Claude3_7Sonnet,
    #[serde(
        rename = "claude-3-7-sonnet-thinking",
        alias = "claude-3-7-sonnet-thinking-latest"
    )]
    Claude3_7SonnetThinking,
    #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
    Claude3Opus,
    #[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-latest")]
    Claude3Sonnet,
    #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
    Claude3_5Haiku,
    Claude3_5Sonnet,
    Claude3Haiku,
    // Amazon Nova models
    AmazonNovaLite,
    AmazonNovaMicro,
    AmazonNovaPro,
    // AI21 models
    AI21J2GrandeInstruct,
    AI21J2JumboInstruct,
    AI21J2Mid,
    AI21J2MidV1,
    AI21J2Ultra,
    AI21J2UltraV1_8k,
    AI21J2UltraV1,
    AI21JambaInstructV1,
    AI21Jamba15LargeV1,
    AI21Jamba15MiniV1,
    // Cohere models
    CohereCommandTextV14_4k,
    CohereCommandRV1,
    CohereCommandRPlusV1,
    CohereCommandLightTextV14_4k,
    // DeepSeek models
    DeepSeekR1,
    // Meta models
    MetaLlama38BInstructV1,
    MetaLlama370BInstructV1,
    MetaLlama318BInstructV1_128k,
    MetaLlama318BInstructV1,
    MetaLlama3170BInstructV1_128k,
    MetaLlama3170BInstructV1,
    MetaLlama3211BInstructV1,
    MetaLlama3290BInstructV1,
    MetaLlama321BInstructV1,
    MetaLlama323BInstructV1,
    // Mistral models
    MistralMistral7BInstructV0,
    MistralMixtral8x7BInstructV0,
    MistralMistralLarge2402V1,
    MistralMistralSmall2402V1,
    // Writer models
    PalmyraWriterX5,
    PalmyraWriterX4,
    #[serde(rename = "custom")]
    Custom {
        name: String,
        max_tokens: usize,
        /// The name displayed in the UI, such as in the assistant panel model dropdown menu.
        display_name: Option<String>,
        max_output_tokens: Option<u32>,
        default_temperature: Option<f32>,
    },
}

impl Model {
    pub fn default_fast() -> Self {
        Self::Claude3_5Haiku
    }

    pub fn from_id(id: &str) -> anyhow::Result<Self> {
        if id.starts_with("claude-3-5-sonnet-v2") {
            Ok(Self::Claude3_5SonnetV2)
        } else if id.starts_with("claude-3-opus") {
            Ok(Self::Claude3Opus)
        } else if id.starts_with("claude-3-sonnet") {
            Ok(Self::Claude3Sonnet)
        } else if id.starts_with("claude-3-5-haiku") {
            Ok(Self::Claude3_5Haiku)
        } else if id.starts_with("claude-3-7-sonnet-thinking") {
            // Check the more specific thinking prefix before the plain 3.7 Sonnet prefix,
            // otherwise this branch would be unreachable.
            Ok(Self::Claude3_7SonnetThinking)
        } else if id.starts_with("claude-3-7-sonnet") {
            Ok(Self::Claude3_7Sonnet)
        } else {
            Err(anyhow!("invalid model id: {id}"))
        }
    }

    pub fn id(&self) -> &str {
        match self {
            Model::Claude3_5SonnetV2 => "anthropic.claude-3-5-sonnet-20241022-v2:0",
            Model::Claude3_5Sonnet => "anthropic.claude-3-5-sonnet-20240620-v1:0",
            Model::Claude3Opus => "anthropic.claude-3-opus-20240229-v1:0",
            Model::Claude3Sonnet => "anthropic.claude-3-sonnet-20240229-v1:0",
            Model::Claude3Haiku => "anthropic.claude-3-haiku-20240307-v1:0",
            Model::Claude3_5Haiku => "anthropic.claude-3-5-haiku-20241022-v1:0",
            Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => {
                "anthropic.claude-3-7-sonnet-20250219-v1:0"
            }
            Model::AmazonNovaLite => "amazon.nova-lite-v1:0",
            Model::AmazonNovaMicro => "amazon.nova-micro-v1:0",
            Model::AmazonNovaPro => "amazon.nova-pro-v1:0",
            Model::DeepSeekR1 => "us.deepseek.r1-v1:0",
            Model::AI21J2GrandeInstruct => "ai21.j2-grande-instruct",
            Model::AI21J2JumboInstruct => "ai21.j2-jumbo-instruct",
            Model::AI21J2Mid => "ai21.j2-mid",
            Model::AI21J2MidV1 => "ai21.j2-mid-v1",
            Model::AI21J2Ultra => "ai21.j2-ultra",
            Model::AI21J2UltraV1_8k => "ai21.j2-ultra-v1:0:8k",
            Model::AI21J2UltraV1 => "ai21.j2-ultra-v1",
            Model::AI21JambaInstructV1 => "ai21.jamba-instruct-v1:0",
            Model::AI21Jamba15LargeV1 => "ai21.jamba-1-5-large-v1:0",
            Model::AI21Jamba15MiniV1 => "ai21.jamba-1-5-mini-v1:0",
            Model::CohereCommandTextV14_4k => "cohere.command-text-v14:7:4k",
            Model::CohereCommandRV1 => "cohere.command-r-v1:0",
            Model::CohereCommandRPlusV1 => "cohere.command-r-plus-v1:0",
            Model::CohereCommandLightTextV14_4k => "cohere.command-light-text-v14:7:4k",
            Model::MetaLlama38BInstructV1 => "meta.llama3-8b-instruct-v1:0",
            Model::MetaLlama370BInstructV1 => "meta.llama3-70b-instruct-v1:0",
            Model::MetaLlama318BInstructV1_128k => "meta.llama3-1-8b-instruct-v1:0:128k",
            Model::MetaLlama318BInstructV1 => "meta.llama3-1-8b-instruct-v1:0",
            Model::MetaLlama3170BInstructV1_128k => "meta.llama3-1-70b-instruct-v1:0:128k",
            Model::MetaLlama3170BInstructV1 => "meta.llama3-1-70b-instruct-v1:0",
            Model::MetaLlama3211BInstructV1 => "meta.llama3-2-11b-instruct-v1:0",
            Model::MetaLlama3290BInstructV1 => "meta.llama3-2-90b-instruct-v1:0",
            Model::MetaLlama321BInstructV1 => "meta.llama3-2-1b-instruct-v1:0",
            Model::MetaLlama323BInstructV1 => "meta.llama3-2-3b-instruct-v1:0",
            Model::MistralMistral7BInstructV0 => "mistral.mistral-7b-instruct-v0:2",
            Model::MistralMixtral8x7BInstructV0 => "mistral.mixtral-8x7b-instruct-v0:1",
            Model::MistralMistralLarge2402V1 => "mistral.mistral-large-2402-v1:0",
            Model::MistralMistralSmall2402V1 => "mistral.mistral-small-2402-v1:0",
            Model::PalmyraWriterX4 => "writer.palmyra-x4-v1:0",
            Model::PalmyraWriterX5 => "writer.palmyra-x5-v1:0",
            Self::Custom { name, .. } => name,
        }
    }

    pub fn display_name(&self) -> &str {
        match self {
            Self::Claude3_5SonnetV2 => "Claude 3.5 Sonnet v2",
            Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
            Self::Claude3Opus => "Claude 3 Opus",
            Self::Claude3Sonnet => "Claude 3 Sonnet",
            Self::Claude3Haiku => "Claude 3 Haiku",
            Self::Claude3_5Haiku => "Claude 3.5 Haiku",
            Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
            Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
            Self::AmazonNovaLite => "Amazon Nova Lite",
            Self::AmazonNovaMicro => "Amazon Nova Micro",
            Self::AmazonNovaPro => "Amazon Nova Pro",
            Self::DeepSeekR1 => "DeepSeek R1",
            Self::AI21J2GrandeInstruct => "AI21 Jurassic2 Grande Instruct",
            Self::AI21J2JumboInstruct => "AI21 Jurassic2 Jumbo Instruct",
            Self::AI21J2Mid => "AI21 Jurassic2 Mid",
            Self::AI21J2MidV1 => "AI21 Jurassic2 Mid V1",
            Self::AI21J2Ultra => "AI21 Jurassic2 Ultra",
            Self::AI21J2UltraV1_8k => "AI21 Jurassic2 Ultra V1 8K",
            Self::AI21J2UltraV1 => "AI21 Jurassic2 Ultra V1",
            Self::AI21JambaInstructV1 => "AI21 Jamba Instruct",
            Self::AI21Jamba15LargeV1 => "AI21 Jamba 1.5 Large",
            Self::AI21Jamba15MiniV1 => "AI21 Jamba 1.5 Mini",
            Self::CohereCommandTextV14_4k => "Cohere Command Text V14 4K",
            Self::CohereCommandRV1 => "Cohere Command R V1",
            Self::CohereCommandRPlusV1 => "Cohere Command R Plus V1",
            Self::CohereCommandLightTextV14_4k => "Cohere Command Light Text V14 4K",
            Self::MetaLlama38BInstructV1 => "Meta Llama 3 8B Instruct V1",
            Self::MetaLlama370BInstructV1 => "Meta Llama 3 70B Instruct V1",
            Self::MetaLlama318BInstructV1_128k => "Meta Llama 3.1 8B Instruct V1 128K",
            Self::MetaLlama318BInstructV1 => "Meta Llama 3.1 8B Instruct V1",
            Self::MetaLlama3170BInstructV1_128k => "Meta Llama 3.1 70B Instruct V1 128K",
            Self::MetaLlama3170BInstructV1 => "Meta Llama 3.1 70B Instruct V1",
            Self::MetaLlama3211BInstructV1 => "Meta Llama 3.2 11B Instruct V1",
            Self::MetaLlama3290BInstructV1 => "Meta Llama 3.2 90B Instruct V1",
            Self::MetaLlama321BInstructV1 => "Meta Llama 3.2 1B Instruct V1",
            Self::MetaLlama323BInstructV1 => "Meta Llama 3.2 3B Instruct V1",
            Self::MistralMistral7BInstructV0 => "Mistral 7B Instruct V0",
            Self::MistralMixtral8x7BInstructV0 => "Mistral Mixtral 8x7B Instruct V0",
            Self::MistralMistralLarge2402V1 => "Mistral Large 2402 V1",
            Self::MistralMistralSmall2402V1 => "Mistral Small 2402 V1",
            Self::PalmyraWriterX5 => "Writer Palmyra X5",
            Self::PalmyraWriterX4 => "Writer Palmyra X4",
            Self::Custom {
                display_name, name, ..
            } => display_name.as_deref().unwrap_or(name),
        }
    }

    pub fn max_token_count(&self) -> usize {
        match self {
            Self::Claude3_5SonnetV2
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet => 200_000,
            Self::PalmyraWriterX5 => 1_000_000,
            Self::PalmyraWriterX4 => 128_000,
            Self::Custom { max_tokens, .. } => *max_tokens,
            _ => 200_000,
        }
    }

    pub fn max_output_tokens(&self) -> u32 {
        match self {
            Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096,
            Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000,
            Self::Claude3_5SonnetV2 | Self::PalmyraWriterX4 | Self::PalmyraWriterX5 => 8_192,
            Self::Custom {
                max_output_tokens, ..
            } => max_output_tokens.unwrap_or(4_096),
            _ => 4_096,
        }
    }

    pub fn default_temperature(&self) -> f32 {
        match self {
            Self::Claude3_5SonnetV2
            | Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3_5Haiku
            | Self::Claude3_7Sonnet => 1.0,
            Self::Custom {
                default_temperature,
                ..
            } => default_temperature.unwrap_or(1.0),
            _ => 1.0,
        }
    }

    pub fn supports_tool_use(&self) -> bool {
        match self {
            // Anthropic Claude 3 models (all support tool use)
            Self::Claude3Opus
            | Self::Claude3Sonnet
            | Self::Claude3_5Sonnet
            | Self::Claude3_5SonnetV2
            | Self::Claude3_7Sonnet
            | Self::Claude3_7SonnetThinking
            | Self::Claude3_5Haiku => true,

            // Amazon Nova models (all support tool use)
            Self::AmazonNovaPro | Self::AmazonNovaLite | Self::AmazonNovaMicro => true,

            // AI21 Jamba 1.5 models support tool use
            Self::AI21Jamba15LargeV1 | Self::AI21Jamba15MiniV1 => true,

            // Cohere Command R models support tool use
            Self::CohereCommandRV1 | Self::CohereCommandRPlusV1 => true,

            // All other models don't support tool use,
            // including Meta Llama 3.2, AI21 Jurassic, and others
            _ => false,
        }
    }

    pub fn mode(&self) -> BedrockModelMode {
        match self {
            Model::Claude3_7SonnetThinking => BedrockModelMode::Thinking {
                budget_tokens: Some(4096),
            },
            _ => BedrockModelMode::Default,
        }
    }

    pub fn cross_region_inference_id(&self, region: &str) -> Result<String, anyhow::Error> {
        let region_group = if region.starts_with("us-gov-") {
            "us-gov"
        } else if region.starts_with("us-") {
            "us"
        } else if region.starts_with("eu-") {
            "eu"
        } else if region.starts_with("ap-") || region == "me-central-1" || region == "me-south-1" {
            "apac"
        } else if region.starts_with("ca-") || region.starts_with("sa-") {
            // Canada and South America regions - default to US profiles
            "us"
        } else {
            // Unknown region
            return Err(anyhow!("Unsupported Region"));
        };

        let model_id = self.id();

        match (self, region_group) {
            // Custom models can't have cross-region inference (CRI) IDs
            (Model::Custom { .. }, _) => Ok(self.id().into()),

            // Models with US Gov profiles only
            (Model::Claude3_5Sonnet, "us-gov") | (Model::Claude3Haiku, "us-gov") => {
                Ok(format!("{}.{}", region_group, model_id))
            }

            // Models available only in US
            (Model::Claude3Opus, "us")
            | (Model::Claude3_7Sonnet, "us")
            | (Model::Claude3_7SonnetThinking, "us") => {
                Ok(format!("{}.{}", region_group, model_id))
            }

            // Models available in US, EU, and APAC
            (Model::Claude3_5SonnetV2, "us")
            | (Model::Claude3_5SonnetV2, "apac")
            | (Model::Claude3_5Sonnet, _)
            | (Model::Claude3Haiku, _)
            | (Model::Claude3Sonnet, _)
            | (Model::AmazonNovaLite, _)
            | (Model::AmazonNovaMicro, _)
            | (Model::AmazonNovaPro, _) => Ok(format!("{}.{}", region_group, model_id)),

            // Models with limited EU availability
            (Model::MetaLlama321BInstructV1, "us")
            | (Model::MetaLlama321BInstructV1, "eu")
            | (Model::MetaLlama323BInstructV1, "us")
            | (Model::MetaLlama323BInstructV1, "eu") => {
                Ok(format!("{}.{}", region_group, model_id))
            }

            // US-only models (all remaining Meta models)
            (Model::MetaLlama38BInstructV1, "us")
            | (Model::MetaLlama370BInstructV1, "us")
            | (Model::MetaLlama318BInstructV1, "us")
            | (Model::MetaLlama318BInstructV1_128k, "us")
            | (Model::MetaLlama3170BInstructV1, "us")
            | (Model::MetaLlama3170BInstructV1_128k, "us")
            | (Model::MetaLlama3211BInstructV1, "us")
            | (Model::MetaLlama3290BInstructV1, "us") => {
                Ok(format!("{}.{}", region_group, model_id))
            }

            // Writer models only available in the US
            (Model::PalmyraWriterX4, "us") | (Model::PalmyraWriterX5, "us") => {
                Ok(format!("{}.{}", region_group, model_id))
            }

            // Any other combination falls back to the plain model ID (no regional prefix)
            _ => Ok(self.id().into()),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_us_region_inference_ids() -> anyhow::Result<()> {
        // Test US regions
        assert_eq!(
            Model::Claude3_5SonnetV2.cross_region_inference_id("us-east-1")?,
            "us.anthropic.claude-3-5-sonnet-20241022-v2:0"
        );
        assert_eq!(
            Model::Claude3_5SonnetV2.cross_region_inference_id("us-west-2")?,
            "us.anthropic.claude-3-5-sonnet-20241022-v2:0"
        );
        assert_eq!(
            Model::AmazonNovaPro.cross_region_inference_id("us-east-2")?,
            "us.amazon.nova-pro-v1:0"
        );
        Ok(())
    }
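
    // Per the match arms in `cross_region_inference_id`, Claude 3 Opus and the
    // Claude 3.7 Sonnet variants only have US inference profiles.
    #[test]
    fn test_us_only_claude_models_inference_ids() -> anyhow::Result<()> {
        assert_eq!(
            Model::Claude3Opus.cross_region_inference_id("us-east-1")?,
            "us.anthropic.claude-3-opus-20240229-v1:0"
        );
        assert_eq!(
            Model::Claude3_7SonnetThinking.cross_region_inference_id("us-west-2")?,
            "us.anthropic.claude-3-7-sonnet-20250219-v1:0"
        );
        Ok(())
    }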

    #[test]
    fn test_eu_region_inference_ids() -> anyhow::Result<()> {
        // Test European regions
        assert_eq!(
            Model::Claude3Sonnet.cross_region_inference_id("eu-west-1")?,
            "eu.anthropic.claude-3-sonnet-20240229-v1:0"
        );
        assert_eq!(
            Model::AmazonNovaMicro.cross_region_inference_id("eu-north-1")?,
            "eu.amazon.nova-micro-v1:0"
        );
        Ok(())
    }
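
    // Canada and South America regions have no dedicated profile group and fall back
    // to the `us` prefix, while regions outside every known group return an error.
    #[test]
    fn test_region_group_fallbacks() -> anyhow::Result<()> {
        assert_eq!(
            Model::Claude3_5SonnetV2.cross_region_inference_id("ca-central-1")?,
            "us.anthropic.claude-3-5-sonnet-20241022-v2:0"
        );
        assert_eq!(
            Model::AmazonNovaPro.cross_region_inference_id("sa-east-1")?,
            "us.amazon.nova-pro-v1:0"
        );
        assert!(Model::AmazonNovaPro
            .cross_region_inference_id("af-south-1")
            .is_err());
        Ok(())
    }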

    #[test]
    fn test_apac_region_inference_ids() -> anyhow::Result<()> {
        // Test Asia-Pacific regions
        assert_eq!(
            Model::Claude3_5SonnetV2.cross_region_inference_id("ap-northeast-1")?,
            "apac.anthropic.claude-3-5-sonnet-20241022-v2:0"
        );
        assert_eq!(
            Model::AmazonNovaLite.cross_region_inference_id("ap-south-1")?,
            "apac.amazon.nova-lite-v1:0"
        );
        Ok(())
    }
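
    // The Middle East regions listed in `cross_region_inference_id` are grouped with APAC.
    #[test]
    fn test_middle_east_region_inference_ids() -> anyhow::Result<()> {
        assert_eq!(
            Model::AmazonNovaPro.cross_region_inference_id("me-central-1")?,
            "apac.amazon.nova-pro-v1:0"
        );
        assert_eq!(
            Model::Claude3Sonnet.cross_region_inference_id("me-south-1")?,
            "apac.anthropic.claude-3-sonnet-20240229-v1:0"
        );
        Ok(())
    }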

    #[test]
    fn test_gov_region_inference_ids() -> anyhow::Result<()> {
        // Test Government regions
        assert_eq!(
            Model::Claude3_5Sonnet.cross_region_inference_id("us-gov-east-1")?,
            "us-gov.anthropic.claude-3-5-sonnet-20240620-v1:0"
        );
        assert_eq!(
            Model::Claude3Haiku.cross_region_inference_id("us-gov-west-1")?,
            "us-gov.anthropic.claude-3-haiku-20240307-v1:0"
        );
        Ok(())
    }
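
    // Models without a GovCloud inference profile fall through to the plain model ID.
    #[test]
    fn test_gov_region_fallback_ids() -> anyhow::Result<()> {
        assert_eq!(
            Model::Claude3_5SonnetV2.cross_region_inference_id("us-gov-west-1")?,
            "anthropic.claude-3-5-sonnet-20241022-v2:0"
        );
        Ok(())
    }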

    #[test]
    fn test_meta_models_inference_ids() -> anyhow::Result<()> {
        // Test Meta models
        assert_eq!(
            Model::MetaLlama370BInstructV1.cross_region_inference_id("us-east-1")?,
            "us.meta.llama3-70b-instruct-v1:0"
        );
        assert_eq!(
            Model::MetaLlama321BInstructV1.cross_region_inference_id("eu-west-1")?,
            "eu.meta.llama3-2-1b-instruct-v1:0"
        );
        Ok(())
    }
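
    // Llama 3.2 11B and 90B only match the US arm, so non-US regions get the plain ID.
    #[test]
    fn test_meta_models_without_eu_profiles() -> anyhow::Result<()> {
        assert_eq!(
            Model::MetaLlama3211BInstructV1.cross_region_inference_id("eu-west-1")?,
            "meta.llama3-2-11b-instruct-v1:0"
        );
        assert_eq!(
            Model::MetaLlama3290BInstructV1.cross_region_inference_id("eu-central-1")?,
            "meta.llama3-2-90b-instruct-v1:0"
        );
        Ok(())
    }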

    #[test]
    fn test_mistral_models_inference_ids() -> anyhow::Result<()> {
        // Mistral models don't follow the regional prefix pattern,
        // so they should return their original IDs
        assert_eq!(
            Model::MistralMistralLarge2402V1.cross_region_inference_id("us-east-1")?,
            "mistral.mistral-large-2402-v1:0"
        );
        assert_eq!(
            Model::MistralMixtral8x7BInstructV0.cross_region_inference_id("eu-west-1")?,
            "mistral.mixtral-8x7b-instruct-v0:1"
        );
        Ok(())
    }
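
    // DeepSeek R1's Bedrock ID already carries a `us.` prefix, so the catch-all
    // returns it unchanged rather than prefixing it again.
    #[test]
    fn test_deepseek_inference_id() -> anyhow::Result<()> {
        assert_eq!(
            Model::DeepSeekR1.cross_region_inference_id("us-east-1")?,
            "us.deepseek.r1-v1:0"
        );
        Ok(())
    }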

    #[test]
    fn test_ai21_models_inference_ids() -> anyhow::Result<()> {
        // AI21 models don't follow the regional prefix pattern,
        // so they should return their original IDs
        assert_eq!(
            Model::AI21J2UltraV1.cross_region_inference_id("us-east-1")?,
            "ai21.j2-ultra-v1"
        );
        assert_eq!(
            Model::AI21JambaInstructV1.cross_region_inference_id("eu-west-1")?,
            "ai21.jamba-instruct-v1:0"
        );
        Ok(())
    }

    #[test]
    fn test_cohere_models_inference_ids() -> anyhow::Result<()> {
        // Cohere models don't follow the regional prefix pattern,
        // so they should return their original IDs
        assert_eq!(
            Model::CohereCommandRV1.cross_region_inference_id("us-east-1")?,
            "cohere.command-r-v1:0"
        );
        assert_eq!(
            Model::CohereCommandTextV14_4k.cross_region_inference_id("ap-southeast-1")?,
            "cohere.command-text-v14:7:4k"
        );
        Ok(())
    }

    #[test]
    fn test_custom_model_inference_ids() -> anyhow::Result<()> {
        // Test custom models
        let custom_model = Model::Custom {
            name: "custom.my-model-v1:0".to_string(),
            max_tokens: 100000,
            display_name: Some("My Custom Model".to_string()),
            max_output_tokens: Some(8192),
            default_temperature: Some(0.7),
        };

        // Custom model should return its name unchanged
        assert_eq!(
            custom_model.cross_region_inference_id("us-east-1")?,
            "custom.my-model-v1:0"
        );

        Ok(())
    }
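
    // `from_id` matches on string prefixes, so the more specific thinking prefix
    // must be checked before the plain Claude 3.7 Sonnet prefix.
    #[test]
    fn test_from_id_prefix_ordering() -> anyhow::Result<()> {
        assert_eq!(
            Model::from_id("claude-3-7-sonnet-thinking-latest")?,
            Model::Claude3_7SonnetThinking
        );
        assert_eq!(Model::from_id("claude-3-7-sonnet")?, Model::Claude3_7Sonnet);
        assert!(Model::from_id("not-a-model").is_err());
        Ok(())
    }

    // Custom models report the overrides they were configured with, and fall back
    // to the raw model name and the 4_096 / 1.0 defaults when fields are omitted.
    #[test]
    fn test_custom_model_metadata() {
        let named = Model::Custom {
            name: "custom.my-model-v1:0".to_string(),
            max_tokens: 100_000,
            display_name: Some("My Custom Model".to_string()),
            max_output_tokens: Some(8_192),
            default_temperature: Some(0.7),
        };
        assert_eq!(named.display_name(), "My Custom Model");
        assert_eq!(named.max_token_count(), 100_000);
        assert_eq!(named.max_output_tokens(), 8_192);

        let unnamed = Model::Custom {
            name: "custom.other-model-v1:0".to_string(),
            max_tokens: 32_000,
            display_name: None,
            max_output_tokens: None,
            default_temperature: None,
        };
        assert_eq!(unnamed.display_name(), "custom.other-model-v1:0");
        assert_eq!(unnamed.max_output_tokens(), 4_096);
        assert_eq!(unnamed.default_temperature(), 1.0);
    }

    // Only the thinking variant opts into `BedrockModelMode::Thinking`; every other
    // model uses the default mode.
    #[test]
    fn test_model_modes() {
        assert_eq!(
            Model::Claude3_7SonnetThinking.mode(),
            BedrockModelMode::Thinking {
                budget_tokens: Some(4096)
            }
        );
        assert_eq!(Model::Claude3_5SonnetV2.mode(), BedrockModelMode::Default);
    }

    // Spot-check the tool-use table: Claude 3.x, Nova, Jamba 1.5, and Command R
    // support tool use; Jurassic-2 and the Llama models do not.
    #[test]
    fn test_supports_tool_use() {
        assert!(Model::Claude3_5SonnetV2.supports_tool_use());
        assert!(Model::AmazonNovaPro.supports_tool_use());
        assert!(Model::AI21Jamba15LargeV1.supports_tool_use());
        assert!(Model::CohereCommandRPlusV1.supports_tool_use());
        assert!(!Model::AI21J2Ultra.supports_tool_use());
        assert!(!Model::MetaLlama3290BInstructV1.supports_tool_use());
    }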
}