1{
2 "name": "OpenRouter",
3 "id": "openrouter",
4 "api_key": "$OPENROUTER_API_KEY",
5 "api_endpoint": "https://openrouter.ai/api/v1",
6 "type": "openai",
7 "default_large_model_id": "anthropic/claude-sonnet-4",
8 "default_small_model_id": "anthropic/claude-3.5-haiku",
9 "models": [
10 {
11 "id": "qwen/qwen3-next-80b-a3b-thinking",
12 "name": "Qwen: Qwen3 Next 80B A3B Thinking",
13 "cost_per_1m_in": 0.14673906,
14 "cost_per_1m_out": 0.586956456,
15 "cost_per_1m_in_cached": 0,
16 "cost_per_1m_out_cached": 0,
17 "context_window": 262144,
18 "default_max_tokens": 26214,
19 "can_reason": true,
20 "has_reasoning_efforts": true,
21 "supports_attachments": false
22 },
23 {
24 "id": "qwen/qwen3-next-80b-a3b-instruct",
25 "name": "Qwen: Qwen3 Next 80B A3B Instruct",
26 "cost_per_1m_in": 0.3,
27 "cost_per_1m_out": 0.3,
28 "cost_per_1m_in_cached": 0,
29 "cost_per_1m_out_cached": 0,
30 "context_window": 262144,
31 "default_max_tokens": 131072,
32 "can_reason": false,
33 "has_reasoning_efforts": false,
34 "supports_attachments": false
35 },
36 {
37 "id": "meituan/longcat-flash-chat",
38 "name": "Meituan: LongCat Flash Chat",
39 "cost_per_1m_in": 0.24999987999999998,
40 "cost_per_1m_out": 0.999999888,
41 "cost_per_1m_in_cached": 0,
42 "cost_per_1m_out_cached": 0,
43 "context_window": 131072,
44 "default_max_tokens": 13107,
45 "can_reason": false,
46 "has_reasoning_efforts": false,
47 "supports_attachments": false
48 },
49 {
50 "id": "qwen/qwen-plus-2025-07-28",
51 "name": "Qwen: Qwen Plus 0728",
52 "cost_per_1m_in": 0.39999999999999997,
53 "cost_per_1m_out": 1.2,
54 "cost_per_1m_in_cached": 0,
55 "cost_per_1m_out_cached": 0,
56 "context_window": 1000000,
57 "default_max_tokens": 16384,
58 "can_reason": false,
59 "has_reasoning_efforts": false,
60 "supports_attachments": false
61 },
62 {
63 "id": "qwen/qwen-plus-2025-07-28:thinking",
64 "name": "Qwen: Qwen Plus 0728 (thinking)",
65 "cost_per_1m_in": 0.39999999999999997,
66 "cost_per_1m_out": 4,
67 "cost_per_1m_in_cached": 0,
68 "cost_per_1m_out_cached": 0,
69 "context_window": 1000000,
70 "default_max_tokens": 16384,
71 "can_reason": true,
72 "has_reasoning_efforts": true,
73 "supports_attachments": false
74 },
75 {
76 "id": "nvidia/nemotron-nano-9b-v2:free",
77 "name": "NVIDIA: Nemotron Nano 9B V2 (free)",
78 "cost_per_1m_in": 0,
79 "cost_per_1m_out": 0,
80 "cost_per_1m_in_cached": 0,
81 "cost_per_1m_out_cached": 0,
82 "context_window": 128000,
83 "default_max_tokens": 12800,
84 "can_reason": true,
85 "has_reasoning_efforts": true,
86 "supports_attachments": false
87 },
88 {
89 "id": "nvidia/nemotron-nano-9b-v2",
90 "name": "NVIDIA: Nemotron Nano 9B V2",
91 "cost_per_1m_in": 0.04,
92 "cost_per_1m_out": 0.16,
93 "cost_per_1m_in_cached": 0,
94 "cost_per_1m_out_cached": 0,
95 "context_window": 131072,
96 "default_max_tokens": 13107,
97 "can_reason": true,
98 "has_reasoning_efforts": true,
99 "supports_attachments": false
100 },
101 {
102 "id": "openrouter/sonoma-dusk-alpha",
103 "name": "Sonoma Dusk Alpha",
104 "cost_per_1m_in": 0,
105 "cost_per_1m_out": 0,
106 "cost_per_1m_in_cached": 0,
107 "cost_per_1m_out_cached": 0,
108 "context_window": 2000000,
109 "default_max_tokens": 200000,
110 "can_reason": false,
111 "has_reasoning_efforts": false,
112 "supports_attachments": true
113 },
114 {
115 "id": "openrouter/sonoma-sky-alpha",
116 "name": "Sonoma Sky Alpha",
117 "cost_per_1m_in": 0,
118 "cost_per_1m_out": 0,
119 "cost_per_1m_in_cached": 0,
120 "cost_per_1m_out_cached": 0,
121 "context_window": 2000000,
122 "default_max_tokens": 200000,
123 "can_reason": true,
124 "has_reasoning_efforts": true,
125 "supports_attachments": true
126 },
127 {
128 "id": "qwen/qwen3-max",
129 "name": "Qwen: Qwen3 Max",
130 "cost_per_1m_in": 1.2,
131 "cost_per_1m_out": 6,
132 "cost_per_1m_in_cached": 0,
133 "cost_per_1m_out_cached": 0.24,
134 "context_window": 256000,
135 "default_max_tokens": 16384,
136 "can_reason": false,
137 "has_reasoning_efforts": false,
138 "supports_attachments": false
139 },
140 {
141 "id": "moonshotai/kimi-k2-0905",
142 "name": "MoonshotAI: Kimi K2 0905",
143 "cost_per_1m_in": 0.58,
144 "cost_per_1m_out": 2.29,
145 "cost_per_1m_in_cached": 0,
146 "cost_per_1m_out_cached": 0,
147 "context_window": 262144,
148 "default_max_tokens": 131072,
149 "can_reason": false,
150 "has_reasoning_efforts": false,
151 "supports_attachments": false
152 },
153 {
154 "id": "deepcogito/cogito-v2-preview-llama-109b-moe",
155 "name": "Cogito V2 Preview Llama 109B",
156 "cost_per_1m_in": 0.18,
157 "cost_per_1m_out": 0.59,
158 "cost_per_1m_in_cached": 0,
159 "cost_per_1m_out_cached": 0,
160 "context_window": 32767,
161 "default_max_tokens": 3276,
162 "can_reason": true,
163 "has_reasoning_efforts": true,
164 "supports_attachments": true
165 },
166 {
167 "id": "stepfun-ai/step3",
168 "name": "StepFun: Step3",
169 "cost_per_1m_in": 0.5700000000000001,
170 "cost_per_1m_out": 1.42,
171 "cost_per_1m_in_cached": 0,
172 "cost_per_1m_out_cached": 0,
173 "context_window": 65536,
174 "default_max_tokens": 32768,
175 "can_reason": true,
176 "has_reasoning_efforts": true,
177 "supports_attachments": true
178 },
179 {
180 "id": "qwen/qwen3-30b-a3b-thinking-2507",
181 "name": "Qwen: Qwen3 30B A3B Thinking 2507",
182 "cost_per_1m_in": 0.09999999999999999,
183 "cost_per_1m_out": 0.3,
184 "cost_per_1m_in_cached": 0,
185 "cost_per_1m_out_cached": 0,
186 "context_window": 262144,
187 "default_max_tokens": 26214,
188 "can_reason": true,
189 "has_reasoning_efforts": true,
190 "supports_attachments": false
191 },
192 {
193 "id": "x-ai/grok-code-fast-1",
194 "name": "xAI: Grok Code Fast 1",
195 "cost_per_1m_in": 0.19999999999999998,
196 "cost_per_1m_out": 1.5,
197 "cost_per_1m_in_cached": 0,
198 "cost_per_1m_out_cached": 0.02,
199 "context_window": 256000,
200 "default_max_tokens": 5000,
201 "can_reason": true,
202 "has_reasoning_efforts": true,
203 "supports_attachments": false
204 },
205 {
206 "id": "nousresearch/hermes-4-70b",
207 "name": "Nous: Hermes 4 70B",
208 "cost_per_1m_in": 0.127173852,
209 "cost_per_1m_out": 0.5086955952000001,
210 "cost_per_1m_in_cached": 0,
211 "cost_per_1m_out_cached": 0,
212 "context_window": 131072,
213 "default_max_tokens": 13107,
214 "can_reason": true,
215 "has_reasoning_efforts": true,
216 "supports_attachments": false
217 },
218 {
219 "id": "nousresearch/hermes-4-405b",
220 "name": "Nous: Hermes 4 405B",
221 "cost_per_1m_in": 0.24999987999999998,
222 "cost_per_1m_out": 0.999999888,
223 "cost_per_1m_in_cached": 0,
224 "cost_per_1m_out_cached": 0,
225 "context_window": 131072,
226 "default_max_tokens": 13107,
227 "can_reason": true,
228 "has_reasoning_efforts": true,
229 "supports_attachments": false
230 },
231 {
232 "id": "deepseek/deepseek-chat-v3.1:free",
233 "name": "DeepSeek: DeepSeek V3.1 (free)",
234 "cost_per_1m_in": 0,
235 "cost_per_1m_out": 0,
236 "cost_per_1m_in_cached": 0,
237 "cost_per_1m_out_cached": 0,
238 "context_window": 163840,
239 "default_max_tokens": 16384,
240 "can_reason": true,
241 "has_reasoning_efforts": true,
242 "supports_attachments": false
243 },
244 {
245 "id": "deepseek/deepseek-chat-v3.1",
246 "name": "DeepSeek: DeepSeek V3.1",
247 "cost_per_1m_in": 0.27,
248 "cost_per_1m_out": 1,
249 "cost_per_1m_in_cached": 0,
250 "cost_per_1m_out_cached": 0,
251 "context_window": 163840,
252 "default_max_tokens": 16384,
253 "can_reason": true,
254 "has_reasoning_efforts": true,
255 "supports_attachments": false
256 },
257 {
258 "id": "openai/gpt-4o-audio-preview",
259 "name": "OpenAI: GPT-4o Audio",
260 "cost_per_1m_in": 2.5,
261 "cost_per_1m_out": 10,
262 "cost_per_1m_in_cached": 0,
263 "cost_per_1m_out_cached": 0,
264 "context_window": 128000,
265 "default_max_tokens": 8192,
266 "can_reason": false,
267 "has_reasoning_efforts": false,
268 "supports_attachments": false
269 },
270 {
271 "id": "mistralai/mistral-medium-3.1",
272 "name": "Mistral: Mistral Medium 3.1",
273 "cost_per_1m_in": 0.39999999999999997,
274 "cost_per_1m_out": 2,
275 "cost_per_1m_in_cached": 0,
276 "cost_per_1m_out_cached": 0,
277 "context_window": 131072,
278 "default_max_tokens": 13107,
279 "can_reason": false,
280 "has_reasoning_efforts": false,
281 "supports_attachments": true
282 },
283 {
284 "id": "z-ai/glm-4.5v",
285 "name": "Z.AI: GLM 4.5V",
286 "cost_per_1m_in": 0.5,
287 "cost_per_1m_out": 1.7999999999999998,
288 "cost_per_1m_in_cached": 0,
289 "cost_per_1m_out_cached": 0,
290 "context_window": 65536,
291 "default_max_tokens": 32768,
292 "can_reason": true,
293 "has_reasoning_efforts": true,
294 "supports_attachments": true
295 },
296 {
297 "id": "ai21/jamba-mini-1.7",
298 "name": "AI21: Jamba Mini 1.7",
299 "cost_per_1m_in": 0.19999999999999998,
300 "cost_per_1m_out": 0.39999999999999997,
301 "cost_per_1m_in_cached": 0,
302 "cost_per_1m_out_cached": 0,
303 "context_window": 256000,
304 "default_max_tokens": 2048,
305 "can_reason": false,
306 "has_reasoning_efforts": false,
307 "supports_attachments": false
308 },
309 {
310 "id": "ai21/jamba-large-1.7",
311 "name": "AI21: Jamba Large 1.7",
312 "cost_per_1m_in": 2,
313 "cost_per_1m_out": 8,
314 "cost_per_1m_in_cached": 0,
315 "cost_per_1m_out_cached": 0,
316 "context_window": 256000,
317 "default_max_tokens": 2048,
318 "can_reason": false,
319 "has_reasoning_efforts": false,
320 "supports_attachments": false
321 },
322 {
323 "id": "openai/gpt-5",
324 "name": "OpenAI: GPT-5",
325 "cost_per_1m_in": 1.25,
326 "cost_per_1m_out": 10,
327 "cost_per_1m_in_cached": 0,
328 "cost_per_1m_out_cached": 0.125,
329 "context_window": 400000,
330 "default_max_tokens": 64000,
331 "can_reason": true,
332 "has_reasoning_efforts": true,
333 "supports_attachments": true
334 },
335 {
336 "id": "openai/gpt-5-mini",
337 "name": "OpenAI: GPT-5 Mini",
338 "cost_per_1m_in": 0.25,
339 "cost_per_1m_out": 2,
340 "cost_per_1m_in_cached": 0,
341 "cost_per_1m_out_cached": 0.024999999999999998,
342 "context_window": 400000,
343 "default_max_tokens": 64000,
344 "can_reason": true,
345 "has_reasoning_efforts": true,
346 "supports_attachments": true
347 },
348 {
349 "id": "openai/gpt-5-nano",
350 "name": "OpenAI: GPT-5 Nano",
351 "cost_per_1m_in": 0.049999999999999996,
352 "cost_per_1m_out": 0.39999999999999997,
353 "cost_per_1m_in_cached": 0,
354 "cost_per_1m_out_cached": 0.005,
355 "context_window": 400000,
356 "default_max_tokens": 64000,
357 "can_reason": true,
358 "has_reasoning_efforts": true,
359 "supports_attachments": true
360 },
361 {
362 "id": "openai/gpt-oss-120b",
363 "name": "OpenAI: gpt-oss-120b",
364 "cost_per_1m_in": 0.15,
365 "cost_per_1m_out": 0.6,
366 "cost_per_1m_in_cached": 0,
367 "cost_per_1m_out_cached": 0,
368 "context_window": 131072,
369 "default_max_tokens": 16384,
370 "can_reason": true,
371 "has_reasoning_efforts": true,
372 "supports_attachments": false
373 },
374 {
375 "id": "openai/gpt-oss-20b",
376 "name": "OpenAI: gpt-oss-20b",
377 "cost_per_1m_in": 0.049999999999999996,
378 "cost_per_1m_out": 0.19999999999999998,
379 "cost_per_1m_in_cached": 0,
380 "cost_per_1m_out_cached": 0,
381 "context_window": 131072,
382 "default_max_tokens": 16384,
383 "can_reason": true,
384 "has_reasoning_efforts": true,
385 "supports_attachments": false
386 },
387 {
388 "id": "anthropic/claude-opus-4.1",
389 "name": "Anthropic: Claude Opus 4.1",
390 "cost_per_1m_in": 15,
391 "cost_per_1m_out": 75,
392 "cost_per_1m_in_cached": 18.75,
393 "cost_per_1m_out_cached": 1.5,
394 "context_window": 200000,
395 "default_max_tokens": 16000,
396 "can_reason": true,
397 "has_reasoning_efforts": true,
398 "supports_attachments": true
399 },
400 {
401 "id": "mistralai/codestral-2508",
402 "name": "Mistral: Codestral 2508",
403 "cost_per_1m_in": 0.3,
404 "cost_per_1m_out": 0.8999999999999999,
405 "cost_per_1m_in_cached": 0,
406 "cost_per_1m_out_cached": 0,
407 "context_window": 256000,
408 "default_max_tokens": 25600,
409 "can_reason": false,
410 "has_reasoning_efforts": false,
411 "supports_attachments": false
412 },
413 {
414 "id": "qwen/qwen3-coder-30b-a3b-instruct",
415 "name": "Qwen: Qwen3 Coder 30B A3B Instruct",
416 "cost_per_1m_in": 0.09999999999999999,
417 "cost_per_1m_out": 0.3,
418 "cost_per_1m_in_cached": 0,
419 "cost_per_1m_out_cached": 0,
420 "context_window": 262144,
421 "default_max_tokens": 26214,
422 "can_reason": false,
423 "has_reasoning_efforts": false,
424 "supports_attachments": false
425 },
426 {
427 "id": "qwen/qwen3-30b-a3b-instruct-2507",
428 "name": "Qwen: Qwen3 30B A3B Instruct 2507",
429 "cost_per_1m_in": 0.09999999999999999,
430 "cost_per_1m_out": 0.3,
431 "cost_per_1m_in_cached": 0,
432 "cost_per_1m_out_cached": 0,
433 "context_window": 262144,
434 "default_max_tokens": 26214,
435 "can_reason": false,
436 "has_reasoning_efforts": false,
437 "supports_attachments": false
438 },
439 {
440 "id": "z-ai/glm-4.5",
441 "name": "Z.AI: GLM 4.5",
442 "cost_per_1m_in": 0.6,
443 "cost_per_1m_out": 2.2,
444 "cost_per_1m_in_cached": 0,
445 "cost_per_1m_out_cached": 0,
446 "context_window": 131072,
447 "default_max_tokens": 13107,
448 "can_reason": true,
449 "has_reasoning_efforts": true,
450 "supports_attachments": false
451 },
452 {
453 "id": "z-ai/glm-4.5-air:free",
454 "name": "Z.AI: GLM 4.5 Air (free)",
455 "cost_per_1m_in": 0,
456 "cost_per_1m_out": 0,
457 "cost_per_1m_in_cached": 0,
458 "cost_per_1m_out_cached": 0,
459 "context_window": 131072,
460 "default_max_tokens": 48000,
461 "can_reason": true,
462 "has_reasoning_efforts": true,
463 "supports_attachments": false
464 },
465 {
466 "id": "z-ai/glm-4.5-air",
467 "name": "Z.AI: GLM 4.5 Air",
468 "cost_per_1m_in": 0.19999999999999998,
469 "cost_per_1m_out": 1.1,
470 "cost_per_1m_in_cached": 0,
471 "cost_per_1m_out_cached": 0.03,
472 "context_window": 131072,
473 "default_max_tokens": 48000,
474 "can_reason": true,
475 "has_reasoning_efforts": true,
476 "supports_attachments": false
477 },
478 {
479 "id": "qwen/qwen3-235b-a22b-thinking-2507",
480 "name": "Qwen: Qwen3 235B A22B Thinking 2507",
481 "cost_per_1m_in": 0.0974999532,
482 "cost_per_1m_out": 0.38999995632,
483 "cost_per_1m_in_cached": 0,
484 "cost_per_1m_out_cached": 0,
485 "context_window": 262144,
486 "default_max_tokens": 26214,
487 "can_reason": true,
488 "has_reasoning_efforts": true,
489 "supports_attachments": false
490 },
491 {
492 "id": "z-ai/glm-4-32b",
493 "name": "Z.AI: GLM 4 32B",
494 "cost_per_1m_in": 0.09999999999999999,
495 "cost_per_1m_out": 0.09999999999999999,
496 "cost_per_1m_in_cached": 0,
497 "cost_per_1m_out_cached": 0,
498 "context_window": 128000,
499 "default_max_tokens": 12800,
500 "can_reason": false,
501 "has_reasoning_efforts": false,
502 "supports_attachments": false
503 },
504 {
505 "id": "qwen/qwen3-coder:free",
506 "name": "Qwen: Qwen3 Coder 480B A35B (free)",
507 "cost_per_1m_in": 0,
508 "cost_per_1m_out": 0,
509 "cost_per_1m_in_cached": 0,
510 "cost_per_1m_out_cached": 0,
511 "context_window": 262144,
512 "default_max_tokens": 26214,
513 "can_reason": false,
514 "has_reasoning_efforts": false,
515 "supports_attachments": false
516 },
517 {
518 "id": "qwen/qwen3-coder",
519 "name": "Qwen: Qwen3 Coder 480B A35B",
520 "cost_per_1m_in": 0.39999999999999997,
521 "cost_per_1m_out": 1.7999999999999998,
522 "cost_per_1m_in_cached": 0,
523 "cost_per_1m_out_cached": 0,
524 "context_window": 262144,
525 "default_max_tokens": 131072,
526 "can_reason": false,
527 "has_reasoning_efforts": false,
528 "supports_attachments": false
529 },
530 {
531 "id": "google/gemini-2.5-flash-lite",
532 "name": "Google: Gemini 2.5 Flash Lite",
533 "cost_per_1m_in": 0.09999999999999999,
534 "cost_per_1m_out": 0.39999999999999997,
535 "cost_per_1m_in_cached": 0.18330000000000002,
536 "cost_per_1m_out_cached": 0.024999999999999998,
537 "context_window": 1048576,
538 "default_max_tokens": 32767,
539 "can_reason": true,
540 "has_reasoning_efforts": true,
541 "supports_attachments": true
542 },
543 {
544 "id": "qwen/qwen3-235b-a22b-2507",
545 "name": "Qwen: Qwen3 235B A22B Instruct 2507",
546 "cost_per_1m_in": 0.22,
547 "cost_per_1m_out": 0.7999999999999999,
548 "cost_per_1m_in_cached": 0,
549 "cost_per_1m_out_cached": 0,
550 "context_window": 262144,
551 "default_max_tokens": 131072,
552 "can_reason": false,
553 "has_reasoning_efforts": false,
554 "supports_attachments": false
555 },
556 {
557 "id": "moonshotai/kimi-k2:free",
558 "name": "MoonshotAI: Kimi K2 0711 (free)",
559 "cost_per_1m_in": 0,
560 "cost_per_1m_out": 0,
561 "cost_per_1m_in_cached": 0,
562 "cost_per_1m_out_cached": 0,
563 "context_window": 32768,
564 "default_max_tokens": 3276,
565 "can_reason": false,
566 "has_reasoning_efforts": false,
567 "supports_attachments": false
568 },
569 {
570 "id": "moonshotai/kimi-k2",
571 "name": "MoonshotAI: Kimi K2 0711",
572 "cost_per_1m_in": 0.5,
573 "cost_per_1m_out": 2.4,
574 "cost_per_1m_in_cached": 0,
575 "cost_per_1m_out_cached": 0,
576 "context_window": 131072,
577 "default_max_tokens": 13107,
578 "can_reason": false,
579 "has_reasoning_efforts": false,
580 "supports_attachments": false
581 },
582 {
583 "id": "mistralai/devstral-medium",
584 "name": "Mistral: Devstral Medium",
585 "cost_per_1m_in": 0.39999999999999997,
586 "cost_per_1m_out": 2,
587 "cost_per_1m_in_cached": 0,
588 "cost_per_1m_out_cached": 0,
589 "context_window": 131072,
590 "default_max_tokens": 13107,
591 "can_reason": false,
592 "has_reasoning_efforts": false,
593 "supports_attachments": false
594 },
595 {
596 "id": "x-ai/grok-4",
597 "name": "xAI: Grok 4",
598 "cost_per_1m_in": 3,
599 "cost_per_1m_out": 15,
600 "cost_per_1m_in_cached": 0,
601 "cost_per_1m_out_cached": 0.75,
602 "context_window": 256000,
603 "default_max_tokens": 25600,
604 "can_reason": true,
605 "has_reasoning_efforts": true,
606 "supports_attachments": true
607 },
608 {
609 "id": "inception/mercury",
610 "name": "Inception: Mercury",
611 "cost_per_1m_in": 0.25,
612 "cost_per_1m_out": 1,
613 "cost_per_1m_in_cached": 0,
614 "cost_per_1m_out_cached": 0,
615 "context_window": 128000,
616 "default_max_tokens": 8192,
617 "can_reason": false,
618 "has_reasoning_efforts": false,
619 "supports_attachments": false
620 },
621 {
622 "id": "mistralai/mistral-small-3.2-24b-instruct:free",
623 "name": "Mistral: Mistral Small 3.2 24B (free)",
624 "cost_per_1m_in": 0,
625 "cost_per_1m_out": 0,
626 "cost_per_1m_in_cached": 0,
627 "cost_per_1m_out_cached": 0,
628 "context_window": 131072,
629 "default_max_tokens": 13107,
630 "can_reason": false,
631 "has_reasoning_efforts": false,
632 "supports_attachments": true
633 },
634 {
635 "id": "mistralai/mistral-small-3.2-24b-instruct",
636 "name": "Mistral: Mistral Small 3.2 24B",
637 "cost_per_1m_in": 0.09999999999999999,
638 "cost_per_1m_out": 0.3,
639 "cost_per_1m_in_cached": 0,
640 "cost_per_1m_out_cached": 0,
641 "context_window": 131072,
642 "default_max_tokens": 13107,
643 "can_reason": false,
644 "has_reasoning_efforts": false,
645 "supports_attachments": true
646 },
647 {
648 "id": "minimax/minimax-m1",
649 "name": "MiniMax: MiniMax M1",
650 "cost_per_1m_in": 0.55,
651 "cost_per_1m_out": 2.2,
652 "cost_per_1m_in_cached": 0,
653 "cost_per_1m_out_cached": 0,
654 "context_window": 1000000,
655 "default_max_tokens": 20000,
656 "can_reason": true,
657 "has_reasoning_efforts": true,
658 "supports_attachments": false
659 },
660 {
661 "id": "google/gemini-2.5-flash-lite-preview-06-17",
662 "name": "Google: Gemini 2.5 Flash Lite Preview 06-17",
663 "cost_per_1m_in": 0.09999999999999999,
664 "cost_per_1m_out": 0.39999999999999997,
665 "cost_per_1m_in_cached": 0.18330000000000002,
666 "cost_per_1m_out_cached": 0.024999999999999998,
667 "context_window": 1048576,
668 "default_max_tokens": 32767,
669 "can_reason": true,
670 "has_reasoning_efforts": true,
671 "supports_attachments": true
672 },
673 {
674 "id": "google/gemini-2.5-flash",
675 "name": "Google: Gemini 2.5 Flash",
676 "cost_per_1m_in": 0.3,
677 "cost_per_1m_out": 2.5,
678 "cost_per_1m_in_cached": 0.3833,
679 "cost_per_1m_out_cached": 0.075,
680 "context_window": 1048576,
681 "default_max_tokens": 32767,
682 "can_reason": true,
683 "has_reasoning_efforts": true,
684 "supports_attachments": true
685 },
686 {
687 "id": "google/gemini-2.5-pro",
688 "name": "Google: Gemini 2.5 Pro",
689 "cost_per_1m_in": 1.25,
690 "cost_per_1m_out": 10,
691 "cost_per_1m_in_cached": 1.625,
692 "cost_per_1m_out_cached": 0.31,
693 "context_window": 1048576,
694 "default_max_tokens": 32768,
695 "can_reason": true,
696 "has_reasoning_efforts": true,
697 "supports_attachments": true
698 },
699 {
700 "id": "openai/o3-pro",
701 "name": "OpenAI: o3 Pro",
702 "cost_per_1m_in": 20,
703 "cost_per_1m_out": 80,
704 "cost_per_1m_in_cached": 0,
705 "cost_per_1m_out_cached": 0,
706 "context_window": 200000,
707 "default_max_tokens": 50000,
708 "can_reason": true,
709 "has_reasoning_efforts": true,
710 "supports_attachments": true
711 },
712 {
713 "id": "x-ai/grok-3-mini",
714 "name": "xAI: Grok 3 Mini",
715 "cost_per_1m_in": 0.6,
716 "cost_per_1m_out": 4,
717 "cost_per_1m_in_cached": 0,
718 "cost_per_1m_out_cached": 0.15,
719 "context_window": 131072,
720 "default_max_tokens": 13107,
721 "can_reason": true,
722 "has_reasoning_efforts": true,
723 "supports_attachments": false
724 },
725 {
726 "id": "x-ai/grok-3",
727 "name": "xAI: Grok 3",
728 "cost_per_1m_in": 5,
729 "cost_per_1m_out": 25,
730 "cost_per_1m_in_cached": 0,
731 "cost_per_1m_out_cached": 1.25,
732 "context_window": 131072,
733 "default_max_tokens": 13107,
734 "can_reason": false,
735 "has_reasoning_efforts": false,
736 "supports_attachments": false
737 },
738 {
739 "id": "mistralai/magistral-small-2506",
740 "name": "Mistral: Magistral Small 2506",
741 "cost_per_1m_in": 0.5,
742 "cost_per_1m_out": 1.5,
743 "cost_per_1m_in_cached": 0,
744 "cost_per_1m_out_cached": 0,
745 "context_window": 40960,
746 "default_max_tokens": 20000,
747 "can_reason": true,
748 "has_reasoning_efforts": true,
749 "supports_attachments": false
750 },
751 {
752 "id": "mistralai/magistral-medium-2506",
753 "name": "Mistral: Magistral Medium 2506",
754 "cost_per_1m_in": 2,
755 "cost_per_1m_out": 5,
756 "cost_per_1m_in_cached": 0,
757 "cost_per_1m_out_cached": 0,
758 "context_window": 40960,
759 "default_max_tokens": 20000,
760 "can_reason": true,
761 "has_reasoning_efforts": true,
762 "supports_attachments": false
763 },
764 {
765 "id": "mistralai/magistral-medium-2506:thinking",
766 "name": "Mistral: Magistral Medium 2506 (thinking)",
767 "cost_per_1m_in": 2,
768 "cost_per_1m_out": 5,
769 "cost_per_1m_in_cached": 0,
770 "cost_per_1m_out_cached": 0,
771 "context_window": 40960,
772 "default_max_tokens": 20000,
773 "can_reason": true,
774 "has_reasoning_efforts": true,
775 "supports_attachments": false
776 },
777 {
778 "id": "google/gemini-2.5-pro-preview",
779 "name": "Google: Gemini 2.5 Pro Preview 06-05",
780 "cost_per_1m_in": 1.25,
781 "cost_per_1m_out": 10,
782 "cost_per_1m_in_cached": 1.625,
783 "cost_per_1m_out_cached": 0.31,
784 "context_window": 1048576,
785 "default_max_tokens": 32768,
786 "can_reason": true,
787 "has_reasoning_efforts": true,
788 "supports_attachments": true
789 },
790 {
791 "id": "deepseek/deepseek-r1-0528",
792 "name": "DeepSeek: R1 0528",
793 "cost_per_1m_in": 2.5500000000000003,
794 "cost_per_1m_out": 5.95,
795 "cost_per_1m_in_cached": 0,
796 "cost_per_1m_out_cached": 0,
797 "context_window": 163840,
798 "default_max_tokens": 65536,
799 "can_reason": true,
800 "has_reasoning_efforts": true,
801 "supports_attachments": false
802 },
803 {
804 "id": "anthropic/claude-opus-4",
805 "name": "Anthropic: Claude Opus 4",
806 "cost_per_1m_in": 15,
807 "cost_per_1m_out": 75,
808 "cost_per_1m_in_cached": 18.75,
809 "cost_per_1m_out_cached": 1.5,
810 "context_window": 200000,
811 "default_max_tokens": 16000,
812 "can_reason": true,
813 "has_reasoning_efforts": true,
814 "supports_attachments": true
815 },
816 {
817 "id": "anthropic/claude-sonnet-4",
818 "name": "Anthropic: Claude Sonnet 4",
819 "cost_per_1m_in": 3,
820 "cost_per_1m_out": 15,
821 "cost_per_1m_in_cached": 3.75,
822 "cost_per_1m_out_cached": 0.3,
823 "context_window": 1000000,
824 "default_max_tokens": 32000,
825 "can_reason": true,
826 "has_reasoning_efforts": true,
827 "supports_attachments": true
828 },
829 {
830 "id": "mistralai/devstral-small-2505:free",
831 "name": "Mistral: Devstral Small 2505 (free)",
832 "cost_per_1m_in": 0,
833 "cost_per_1m_out": 0,
834 "cost_per_1m_in_cached": 0,
835 "cost_per_1m_out_cached": 0,
836 "context_window": 32768,
837 "default_max_tokens": 3276,
838 "can_reason": false,
839 "has_reasoning_efforts": false,
840 "supports_attachments": false
841 },
842 {
843 "id": "mistralai/devstral-small-2505",
844 "name": "Mistral: Devstral Small 2505",
845 "cost_per_1m_in": 0.035869548,
846 "cost_per_1m_out": 0.14347824480000002,
847 "cost_per_1m_in_cached": 0,
848 "cost_per_1m_out_cached": 0,
849 "context_window": 131072,
850 "default_max_tokens": 13107,
851 "can_reason": false,
852 "has_reasoning_efforts": false,
853 "supports_attachments": false
854 },
855 {
856 "id": "openai/codex-mini",
857 "name": "OpenAI: Codex Mini",
858 "cost_per_1m_in": 1.5,
859 "cost_per_1m_out": 6,
860 "cost_per_1m_in_cached": 0,
861 "cost_per_1m_out_cached": 0.375,
862 "context_window": 200000,
863 "default_max_tokens": 50000,
864 "can_reason": true,
865 "has_reasoning_efforts": true,
866 "supports_attachments": true
867 },
868 {
869 "id": "meta-llama/llama-3.3-8b-instruct:free",
870 "name": "Meta: Llama 3.3 8B Instruct (free)",
871 "cost_per_1m_in": 0,
872 "cost_per_1m_out": 0,
873 "cost_per_1m_in_cached": 0,
874 "cost_per_1m_out_cached": 0,
875 "context_window": 128000,
876 "default_max_tokens": 2014,
877 "can_reason": false,
878 "has_reasoning_efforts": false,
879 "supports_attachments": false
880 },
881 {
882 "id": "mistralai/mistral-medium-3",
883 "name": "Mistral: Mistral Medium 3",
884 "cost_per_1m_in": 0.39999999999999997,
885 "cost_per_1m_out": 2,
886 "cost_per_1m_in_cached": 0,
887 "cost_per_1m_out_cached": 0,
888 "context_window": 131072,
889 "default_max_tokens": 13107,
890 "can_reason": false,
891 "has_reasoning_efforts": false,
892 "supports_attachments": true
893 },
894 {
895 "id": "google/gemini-2.5-pro-preview-05-06",
896 "name": "Google: Gemini 2.5 Pro Preview 05-06",
897 "cost_per_1m_in": 1.25,
898 "cost_per_1m_out": 10,
899 "cost_per_1m_in_cached": 1.625,
900 "cost_per_1m_out_cached": 0.31,
901 "context_window": 1048576,
902 "default_max_tokens": 32768,
903 "can_reason": true,
904 "has_reasoning_efforts": true,
905 "supports_attachments": true
906 },
907 {
908 "id": "arcee-ai/virtuoso-large",
909 "name": "Arcee AI: Virtuoso Large",
910 "cost_per_1m_in": 0.75,
911 "cost_per_1m_out": 1.2,
912 "cost_per_1m_in_cached": 0,
913 "cost_per_1m_out_cached": 0,
914 "context_window": 131072,
915 "default_max_tokens": 32000,
916 "can_reason": false,
917 "has_reasoning_efforts": false,
918 "supports_attachments": false
919 },
920 {
921 "id": "inception/mercury-coder",
922 "name": "Inception: Mercury Coder",
923 "cost_per_1m_in": 0.25,
924 "cost_per_1m_out": 1,
925 "cost_per_1m_in_cached": 0,
926 "cost_per_1m_out_cached": 0,
927 "context_window": 128000,
928 "default_max_tokens": 8192,
929 "can_reason": false,
930 "has_reasoning_efforts": false,
931 "supports_attachments": false
932 },
933 {
934 "id": "qwen/qwen3-4b:free",
935 "name": "Qwen: Qwen3 4B (free)",
936 "cost_per_1m_in": 0,
937 "cost_per_1m_out": 0,
938 "cost_per_1m_in_cached": 0,
939 "cost_per_1m_out_cached": 0,
940 "context_window": 40960,
941 "default_max_tokens": 4096,
942 "can_reason": true,
943 "has_reasoning_efforts": true,
944 "supports_attachments": false
945 },
946 {
947 "id": "qwen/qwen3-30b-a3b",
948 "name": "Qwen: Qwen3 30B A3B",
949 "cost_per_1m_in": 0.09,
950 "cost_per_1m_out": 0.44999999999999996,
951 "cost_per_1m_in_cached": 0,
952 "cost_per_1m_out_cached": 0,
953 "context_window": 131072,
954 "default_max_tokens": 65536,
955 "can_reason": true,
956 "has_reasoning_efforts": true,
957 "supports_attachments": false
958 },
959 {
960 "id": "qwen/qwen3-14b",
961 "name": "Qwen: Qwen3 14B",
962 "cost_per_1m_in": 0.06,
963 "cost_per_1m_out": 0.24,
964 "cost_per_1m_in_cached": 0,
965 "cost_per_1m_out_cached": 0,
966 "context_window": 40960,
967 "default_max_tokens": 20480,
968 "can_reason": true,
969 "has_reasoning_efforts": true,
970 "supports_attachments": false
971 },
972 {
973 "id": "qwen/qwen3-32b",
974 "name": "Qwen: Qwen3 32B",
975 "cost_per_1m_in": 0.14,
976 "cost_per_1m_out": 0.5700000000000001,
977 "cost_per_1m_in_cached": 0,
978 "cost_per_1m_out_cached": 0,
979 "context_window": 131072,
980 "default_max_tokens": 65536,
981 "can_reason": true,
982 "has_reasoning_efforts": true,
983 "supports_attachments": false
984 },
985 {
986 "id": "qwen/qwen3-235b-a22b:free",
987 "name": "Qwen: Qwen3 235B A22B (free)",
988 "cost_per_1m_in": 0,
989 "cost_per_1m_out": 0,
990 "cost_per_1m_in_cached": 0,
991 "cost_per_1m_out_cached": 0,
992 "context_window": 131072,
993 "default_max_tokens": 13107,
994 "can_reason": false,
995 "has_reasoning_efforts": false,
996 "supports_attachments": false
997 },
998 {
999 "id": "qwen/qwen3-235b-a22b",
1000 "name": "Qwen: Qwen3 235B A22B",
1001 "cost_per_1m_in": 0.22,
1002 "cost_per_1m_out": 0.88,
1003 "cost_per_1m_in_cached": 0,
1004 "cost_per_1m_out_cached": 0,
1005 "context_window": 131072,
1006 "default_max_tokens": 13107,
1007 "can_reason": true,
1008 "has_reasoning_efforts": true,
1009 "supports_attachments": false
1010 },
1011 {
1012 "id": "openai/o4-mini-high",
1013 "name": "OpenAI: o4 Mini High",
1014 "cost_per_1m_in": 1.1,
1015 "cost_per_1m_out": 4.4,
1016 "cost_per_1m_in_cached": 0,
1017 "cost_per_1m_out_cached": 0.275,
1018 "context_window": 200000,
1019 "default_max_tokens": 50000,
1020 "can_reason": true,
1021 "has_reasoning_efforts": true,
1022 "supports_attachments": true
1023 },
1024 {
1025 "id": "openai/o3",
1026 "name": "OpenAI: o3",
1027 "cost_per_1m_in": 2,
1028 "cost_per_1m_out": 8,
1029 "cost_per_1m_in_cached": 0,
1030 "cost_per_1m_out_cached": 0.5,
1031 "context_window": 200000,
1032 "default_max_tokens": 50000,
1033 "can_reason": true,
1034 "has_reasoning_efforts": true,
1035 "supports_attachments": true
1036 },
1037 {
1038 "id": "openai/o4-mini",
1039 "name": "OpenAI: o4 Mini",
1040 "cost_per_1m_in": 1.1,
1041 "cost_per_1m_out": 4.4,
1042 "cost_per_1m_in_cached": 0,
1043 "cost_per_1m_out_cached": 0.275,
1044 "context_window": 200000,
1045 "default_max_tokens": 50000,
1046 "can_reason": true,
1047 "has_reasoning_efforts": true,
1048 "supports_attachments": true
1049 },
1050 {
1051 "id": "openai/gpt-4.1",
1052 "name": "OpenAI: GPT-4.1",
1053 "cost_per_1m_in": 2,
1054 "cost_per_1m_out": 8,
1055 "cost_per_1m_in_cached": 0,
1056 "cost_per_1m_out_cached": 0.5,
1057 "context_window": 1047576,
1058 "default_max_tokens": 16384,
1059 "can_reason": false,
1060 "has_reasoning_efforts": false,
1061 "supports_attachments": true
1062 },
1063 {
1064 "id": "openai/gpt-4.1-mini",
1065 "name": "OpenAI: GPT-4.1 Mini",
1066 "cost_per_1m_in": 0.39999999999999997,
1067 "cost_per_1m_out": 1.5999999999999999,
1068 "cost_per_1m_in_cached": 0,
1069 "cost_per_1m_out_cached": 0.09999999999999999,
1070 "context_window": 1047576,
1071 "default_max_tokens": 16384,
1072 "can_reason": false,
1073 "has_reasoning_efforts": false,
1074 "supports_attachments": true
1075 },
1076 {
1077 "id": "openai/gpt-4.1-nano",
1078 "name": "OpenAI: GPT-4.1 Nano",
1079 "cost_per_1m_in": 0.09999999999999999,
1080 "cost_per_1m_out": 0.39999999999999997,
1081 "cost_per_1m_in_cached": 0,
1082 "cost_per_1m_out_cached": 0.024999999999999998,
1083 "context_window": 1047576,
1084 "default_max_tokens": 16384,
1085 "can_reason": false,
1086 "has_reasoning_efforts": false,
1087 "supports_attachments": true
1088 },
1089 {
1090 "id": "x-ai/grok-3-mini-beta",
1091 "name": "xAI: Grok 3 Mini Beta",
1092 "cost_per_1m_in": 0.6,
1093 "cost_per_1m_out": 4,
1094 "cost_per_1m_in_cached": 0,
1095 "cost_per_1m_out_cached": 0.15,
1096 "context_window": 131072,
1097 "default_max_tokens": 13107,
1098 "can_reason": true,
1099 "has_reasoning_efforts": true,
1100 "supports_attachments": false
1101 },
1102 {
1103 "id": "x-ai/grok-3-beta",
1104 "name": "xAI: Grok 3 Beta",
1105 "cost_per_1m_in": 5,
1106 "cost_per_1m_out": 25,
1107 "cost_per_1m_in_cached": 0,
1108 "cost_per_1m_out_cached": 1.25,
1109 "context_window": 131072,
1110 "default_max_tokens": 13107,
1111 "can_reason": false,
1112 "has_reasoning_efforts": false,
1113 "supports_attachments": false
1114 },
1115 {
1116 "id": "meta-llama/llama-4-maverick:free",
1117 "name": "Meta: Llama 4 Maverick (free)",
1118 "cost_per_1m_in": 0,
1119 "cost_per_1m_out": 0,
1120 "cost_per_1m_in_cached": 0,
1121 "cost_per_1m_out_cached": 0,
1122 "context_window": 128000,
1123 "default_max_tokens": 2014,
1124 "can_reason": false,
1125 "has_reasoning_efforts": false,
1126 "supports_attachments": true
1127 },
1128 {
1129 "id": "meta-llama/llama-4-maverick",
1130 "name": "Meta: Llama 4 Maverick",
1131 "cost_per_1m_in": 0.18,
1132 "cost_per_1m_out": 0.6,
1133 "cost_per_1m_in_cached": 0,
1134 "cost_per_1m_out_cached": 0,
1135 "context_window": 1048576,
1136 "default_max_tokens": 524288,
1137 "can_reason": false,
1138 "has_reasoning_efforts": false,
1139 "supports_attachments": true
1140 },
1141 {
1142 "id": "meta-llama/llama-4-scout:free",
1143 "name": "Meta: Llama 4 Scout (free)",
1144 "cost_per_1m_in": 0,
1145 "cost_per_1m_out": 0,
1146 "cost_per_1m_in_cached": 0,
1147 "cost_per_1m_out_cached": 0,
1148 "context_window": 128000,
1149 "default_max_tokens": 2014,
1150 "can_reason": false,
1151 "has_reasoning_efforts": false,
1152 "supports_attachments": true
1153 },
1154 {
1155 "id": "meta-llama/llama-4-scout",
1156 "name": "Meta: Llama 4 Scout",
1157 "cost_per_1m_in": 0.15,
1158 "cost_per_1m_out": 0.6,
1159 "cost_per_1m_in_cached": 0,
1160 "cost_per_1m_out_cached": 0,
1161 "context_window": 1048576,
1162 "default_max_tokens": 104857,
1163 "can_reason": false,
1164 "has_reasoning_efforts": false,
1165 "supports_attachments": true
1166 },
1167 {
1168 "id": "deepseek/deepseek-chat-v3-0324:free",
1169 "name": "DeepSeek: DeepSeek V3 0324 (free)",
1170 "cost_per_1m_in": 0,
1171 "cost_per_1m_out": 0,
1172 "cost_per_1m_in_cached": 0,
1173 "cost_per_1m_out_cached": 0,
1174 "context_window": 163840,
1175 "default_max_tokens": 16384,
1176 "can_reason": false,
1177 "has_reasoning_efforts": false,
1178 "supports_attachments": false
1179 },
1180 {
1181 "id": "deepseek/deepseek-chat-v3-0324",
1182 "name": "DeepSeek: DeepSeek V3 0324",
1183 "cost_per_1m_in": 0.28,
1184 "cost_per_1m_out": 1.1400000000000001,
1185 "cost_per_1m_in_cached": 0,
1186 "cost_per_1m_out_cached": 0,
1187 "context_window": 163840,
1188 "default_max_tokens": 81920,
1189 "can_reason": false,
1190 "has_reasoning_efforts": false,
1191 "supports_attachments": false
1192 },
1193 {
1194 "id": "mistralai/mistral-small-3.1-24b-instruct:free",
1195 "name": "Mistral: Mistral Small 3.1 24B (free)",
1196 "cost_per_1m_in": 0,
1197 "cost_per_1m_out": 0,
1198 "cost_per_1m_in_cached": 0,
1199 "cost_per_1m_out_cached": 0,
1200 "context_window": 128000,
1201 "default_max_tokens": 12800,
1202 "can_reason": false,
1203 "has_reasoning_efforts": false,
1204 "supports_attachments": true
1205 },
1206 {
1207 "id": "mistralai/mistral-small-3.1-24b-instruct",
1208 "name": "Mistral: Mistral Small 3.1 24B",
1209 "cost_per_1m_in": 0.09999999999999999,
1210 "cost_per_1m_out": 0.3,
1211 "cost_per_1m_in_cached": 0,
1212 "cost_per_1m_out_cached": 0,
1213 "context_window": 131072,
1214 "default_max_tokens": 13107,
1215 "can_reason": false,
1216 "has_reasoning_efforts": false,
1217 "supports_attachments": true
1218 },
1219 {
1220 "id": "qwen/qwq-32b",
1221 "name": "Qwen: QwQ 32B",
1222 "cost_per_1m_in": 0.15,
1223 "cost_per_1m_out": 0.58,
1224 "cost_per_1m_in_cached": 0,
1225 "cost_per_1m_out_cached": 0,
1226 "context_window": 131072,
1227 "default_max_tokens": 65536,
1228 "can_reason": true,
1229 "has_reasoning_efforts": true,
1230 "supports_attachments": false
1231 },
1232 {
1233 "id": "google/gemini-2.0-flash-lite-001",
1234 "name": "Google: Gemini 2.0 Flash Lite",
1235 "cost_per_1m_in": 0.075,
1236 "cost_per_1m_out": 0.3,
1237 "cost_per_1m_in_cached": 0,
1238 "cost_per_1m_out_cached": 0,
1239 "context_window": 1048576,
1240 "default_max_tokens": 4096,
1241 "can_reason": false,
1242 "has_reasoning_efforts": false,
1243 "supports_attachments": true
1244 },
1245 {
1246 "id": "anthropic/claude-3.7-sonnet",
1247 "name": "Anthropic: Claude 3.7 Sonnet",
1248 "cost_per_1m_in": 3,
1249 "cost_per_1m_out": 15,
1250 "cost_per_1m_in_cached": 3.75,
1251 "cost_per_1m_out_cached": 0.3,
1252 "context_window": 200000,
1253 "default_max_tokens": 64000,
1254 "can_reason": true,
1255 "has_reasoning_efforts": true,
1256 "supports_attachments": true
1257 },
1258 {
1259 "id": "anthropic/claude-3.7-sonnet:thinking",
1260 "name": "Anthropic: Claude 3.7 Sonnet (thinking)",
1261 "cost_per_1m_in": 3,
1262 "cost_per_1m_out": 15,
1263 "cost_per_1m_in_cached": 3.75,
1264 "cost_per_1m_out_cached": 0.3,
1265 "context_window": 200000,
1266 "default_max_tokens": 32000,
1267 "can_reason": true,
1268 "has_reasoning_efforts": true,
1269 "supports_attachments": true
1270 },
1271 {
1272 "id": "mistralai/mistral-saba",
1273 "name": "Mistral: Saba",
1274 "cost_per_1m_in": 0.19999999999999998,
1275 "cost_per_1m_out": 0.6,
1276 "cost_per_1m_in_cached": 0,
1277 "cost_per_1m_out_cached": 0,
1278 "context_window": 32768,
1279 "default_max_tokens": 3276,
1280 "can_reason": false,
1281 "has_reasoning_efforts": false,
1282 "supports_attachments": false
1283 },
1284 {
1285 "id": "openai/o3-mini-high",
1286 "name": "OpenAI: o3 Mini High",
1287 "cost_per_1m_in": 1.1,
1288 "cost_per_1m_out": 4.4,
1289 "cost_per_1m_in_cached": 0,
1290 "cost_per_1m_out_cached": 0.55,
1291 "context_window": 200000,
1292 "default_max_tokens": 50000,
1293          "can_reason": true,
1294          "has_reasoning_efforts": true,
1295 "supports_attachments": false
1296 },
1297 {
1298 "id": "google/gemini-2.0-flash-001",
1299 "name": "Google: Gemini 2.0 Flash",
1300 "cost_per_1m_in": 0.09999999999999999,
1301 "cost_per_1m_out": 0.39999999999999997,
1302 "cost_per_1m_in_cached": 0.18330000000000002,
1303 "cost_per_1m_out_cached": 0.024999999999999998,
1304 "context_window": 1048576,
1305 "default_max_tokens": 4096,
1306 "can_reason": false,
1307 "has_reasoning_efforts": false,
1308 "supports_attachments": true
1309 },
1310 {
1311 "id": "qwen/qwen-turbo",
1312 "name": "Qwen: Qwen-Turbo",
1313 "cost_per_1m_in": 0.049999999999999996,
1314 "cost_per_1m_out": 0.19999999999999998,
1315 "cost_per_1m_in_cached": 0,
1316 "cost_per_1m_out_cached": 0.02,
1317 "context_window": 1000000,
1318 "default_max_tokens": 4096,
1319 "can_reason": false,
1320 "has_reasoning_efforts": false,
1321 "supports_attachments": false
1322 },
1323 {
1324 "id": "qwen/qwen-plus",
1325 "name": "Qwen: Qwen-Plus",
1326 "cost_per_1m_in": 0.39999999999999997,
1327 "cost_per_1m_out": 1.2,
1328 "cost_per_1m_in_cached": 0,
1329 "cost_per_1m_out_cached": 0.16,
1330 "context_window": 131072,
1331 "default_max_tokens": 4096,
1332 "can_reason": false,
1333 "has_reasoning_efforts": false,
1334 "supports_attachments": false
1335 },
1336 {
1337 "id": "qwen/qwen-max",
1338          "name": "Qwen: Qwen-Max",
1339 "cost_per_1m_in": 1.5999999999999999,
1340 "cost_per_1m_out": 6.3999999999999995,
1341 "cost_per_1m_in_cached": 0,
1342 "cost_per_1m_out_cached": 0.64,
1343 "context_window": 32768,
1344 "default_max_tokens": 4096,
1345 "can_reason": false,
1346 "has_reasoning_efforts": false,
1347 "supports_attachments": false
1348 },
1349 {
1350 "id": "openai/o3-mini",
1351 "name": "OpenAI: o3 Mini",
1352 "cost_per_1m_in": 1.1,
1353 "cost_per_1m_out": 4.4,
1354 "cost_per_1m_in_cached": 0,
1355 "cost_per_1m_out_cached": 0.55,
1356 "context_window": 200000,
1357 "default_max_tokens": 50000,
1358          "can_reason": true,
1359          "has_reasoning_efforts": true,
1360 "supports_attachments": false
1361 },
1362 {
1363 "id": "mistralai/mistral-small-24b-instruct-2501",
1364 "name": "Mistral: Mistral Small 3",
1365 "cost_per_1m_in": 0.09999999999999999,
1366 "cost_per_1m_out": 0.3,
1367 "cost_per_1m_in_cached": 0,
1368 "cost_per_1m_out_cached": 0,
1369 "context_window": 32768,
1370 "default_max_tokens": 3276,
1371 "can_reason": false,
1372 "has_reasoning_efforts": false,
1373 "supports_attachments": false
1374 },
1375 {
1376 "id": "deepseek/deepseek-r1",
1377 "name": "DeepSeek: R1",
1378 "cost_per_1m_in": 0.7,
1379 "cost_per_1m_out": 2.4,
1380 "cost_per_1m_in_cached": 0,
1381 "cost_per_1m_out_cached": 0,
1382 "context_window": 163840,
1383 "default_max_tokens": 81920,
1384 "can_reason": true,
1385 "has_reasoning_efforts": true,
1386 "supports_attachments": false
1387 },
1388 {
1389 "id": "mistralai/codestral-2501",
1390 "name": "Mistral: Codestral 2501",
1391 "cost_per_1m_in": 0.3,
1392 "cost_per_1m_out": 0.8999999999999999,
1393 "cost_per_1m_in_cached": 0,
1394 "cost_per_1m_out_cached": 0,
1395 "context_window": 262144,
1396 "default_max_tokens": 26214,
1397 "can_reason": false,
1398 "has_reasoning_efforts": false,
1399 "supports_attachments": false
1400 },
1401 {
1402 "id": "deepseek/deepseek-chat",
1403 "name": "DeepSeek: DeepSeek V3",
1404 "cost_per_1m_in": 0.8999999999999999,
1405 "cost_per_1m_out": 0.8999999999999999,
1406 "cost_per_1m_in_cached": 0,
1407 "cost_per_1m_out_cached": 0,
1408 "context_window": 131072,
1409 "default_max_tokens": 13107,
1410 "can_reason": false,
1411 "has_reasoning_efforts": false,
1412 "supports_attachments": false
1413 },
1414 {
1415 "id": "openai/o1",
1416 "name": "OpenAI: o1",
1417 "cost_per_1m_in": 15,
1418 "cost_per_1m_out": 60,
1419 "cost_per_1m_in_cached": 0,
1420 "cost_per_1m_out_cached": 7.5,
1421 "context_window": 200000,
1422 "default_max_tokens": 50000,
1423          "can_reason": true,
1424          "has_reasoning_efforts": true,
1425 "supports_attachments": true
1426 },
1427 {
1428 "id": "x-ai/grok-2-1212",
1429 "name": "xAI: Grok 2 1212",
1430 "cost_per_1m_in": 2,
1431 "cost_per_1m_out": 10,
1432 "cost_per_1m_in_cached": 0,
1433 "cost_per_1m_out_cached": 0,
1434 "context_window": 131072,
1435 "default_max_tokens": 13107,
1436 "can_reason": false,
1437 "has_reasoning_efforts": false,
1438 "supports_attachments": false
1439 },
1440 {
1441 "id": "google/gemini-2.0-flash-exp:free",
1442 "name": "Google: Gemini 2.0 Flash Experimental (free)",
1443 "cost_per_1m_in": 0,
1444 "cost_per_1m_out": 0,
1445 "cost_per_1m_in_cached": 0,
1446 "cost_per_1m_out_cached": 0,
1447 "context_window": 1048576,
1448 "default_max_tokens": 4096,
1449 "can_reason": false,
1450 "has_reasoning_efforts": false,
1451 "supports_attachments": true
1452 },
1453 {
1454 "id": "meta-llama/llama-3.3-70b-instruct:free",
1455 "name": "Meta: Llama 3.3 70B Instruct (free)",
1456 "cost_per_1m_in": 0,
1457 "cost_per_1m_out": 0,
1458 "cost_per_1m_in_cached": 0,
1459 "cost_per_1m_out_cached": 0,
1460 "context_window": 65536,
1461 "default_max_tokens": 6553,
1462 "can_reason": false,
1463 "has_reasoning_efforts": false,
1464 "supports_attachments": false
1465 },
1466 {
1467 "id": "meta-llama/llama-3.3-70b-instruct",
1468 "name": "Meta: Llama 3.3 70B Instruct",
1469 "cost_per_1m_in": 0.039,
1470 "cost_per_1m_out": 0.12,
1471 "cost_per_1m_in_cached": 0,
1472 "cost_per_1m_out_cached": 0,
1473 "context_window": 131072,
1474 "default_max_tokens": 4096,
1475 "can_reason": false,
1476 "has_reasoning_efforts": false,
1477 "supports_attachments": false
1478 },
1479 {
1480 "id": "amazon/nova-lite-v1",
1481 "name": "Amazon: Nova Lite 1.0",
1482 "cost_per_1m_in": 0.06,
1483 "cost_per_1m_out": 0.24,
1484 "cost_per_1m_in_cached": 0,
1485 "cost_per_1m_out_cached": 0,
1486 "context_window": 300000,
1487 "default_max_tokens": 2560,
1488 "can_reason": false,
1489 "has_reasoning_efforts": false,
1490 "supports_attachments": true
1491 },
1492 {
1493 "id": "amazon/nova-micro-v1",
1494 "name": "Amazon: Nova Micro 1.0",
1495 "cost_per_1m_in": 0.035,
1496 "cost_per_1m_out": 0.14,
1497 "cost_per_1m_in_cached": 0,
1498 "cost_per_1m_out_cached": 0,
1499 "context_window": 128000,
1500 "default_max_tokens": 2560,
1501 "can_reason": false,
1502 "has_reasoning_efforts": false,
1503 "supports_attachments": false
1504 },
1505 {
1506 "id": "amazon/nova-pro-v1",
1507 "name": "Amazon: Nova Pro 1.0",
1508 "cost_per_1m_in": 0.7999999999999999,
1509 "cost_per_1m_out": 3.1999999999999997,
1510 "cost_per_1m_in_cached": 0,
1511 "cost_per_1m_out_cached": 0,
1512 "context_window": 300000,
1513 "default_max_tokens": 2560,
1514 "can_reason": false,
1515 "has_reasoning_efforts": false,
1516 "supports_attachments": true
1517 },
1518 {
1519 "id": "openai/gpt-4o-2024-11-20",
1520 "name": "OpenAI: GPT-4o (2024-11-20)",
1521 "cost_per_1m_in": 2.5,
1522 "cost_per_1m_out": 10,
1523 "cost_per_1m_in_cached": 0,
1524 "cost_per_1m_out_cached": 1.25,
1525 "context_window": 128000,
1526 "default_max_tokens": 8192,
1527 "can_reason": false,
1528 "has_reasoning_efforts": false,
1529 "supports_attachments": true
1530 },
1531 {
1532 "id": "mistralai/mistral-large-2411",
1533 "name": "Mistral Large 2411",
1534 "cost_per_1m_in": 2,
1535 "cost_per_1m_out": 6,
1536 "cost_per_1m_in_cached": 0,
1537 "cost_per_1m_out_cached": 0,
1538 "context_window": 131072,
1539 "default_max_tokens": 13107,
1540 "can_reason": false,
1541 "has_reasoning_efforts": false,
1542 "supports_attachments": false
1543 },
1544 {
1545 "id": "mistralai/mistral-large-2407",
1546 "name": "Mistral Large 2407",
1547 "cost_per_1m_in": 2,
1548 "cost_per_1m_out": 6,
1549 "cost_per_1m_in_cached": 0,
1550 "cost_per_1m_out_cached": 0,
1551 "context_window": 131072,
1552 "default_max_tokens": 13107,
1553 "can_reason": false,
1554 "has_reasoning_efforts": false,
1555 "supports_attachments": false
1556 },
1557 {
1558 "id": "mistralai/pixtral-large-2411",
1559 "name": "Mistral: Pixtral Large 2411",
1560 "cost_per_1m_in": 2,
1561 "cost_per_1m_out": 6,
1562 "cost_per_1m_in_cached": 0,
1563 "cost_per_1m_out_cached": 0,
1564 "context_window": 131072,
1565 "default_max_tokens": 13107,
1566 "can_reason": false,
1567 "has_reasoning_efforts": false,
1568 "supports_attachments": true
1569 },
1570 {
1571 "id": "thedrummer/unslopnemo-12b",
1572 "name": "TheDrummer: UnslopNemo 12B",
1573 "cost_per_1m_in": 0.39999999999999997,
1574 "cost_per_1m_out": 0.39999999999999997,
1575 "cost_per_1m_in_cached": 0,
1576 "cost_per_1m_out_cached": 0,
1577 "context_window": 32768,
1578 "default_max_tokens": 3276,
1579 "can_reason": false,
1580 "has_reasoning_efforts": false,
1581 "supports_attachments": false
1582 },
1583 {
1584 "id": "anthropic/claude-3.5-haiku",
1585 "name": "Anthropic: Claude 3.5 Haiku",
1586 "cost_per_1m_in": 0.7999999999999999,
1587 "cost_per_1m_out": 4,
1588 "cost_per_1m_in_cached": 1,
1589 "cost_per_1m_out_cached": 0.08,
1590 "context_window": 200000,
1591 "default_max_tokens": 4096,
1592 "can_reason": false,
1593 "has_reasoning_efforts": false,
1594 "supports_attachments": true
1595 },
1596 {
1597 "id": "anthropic/claude-3.5-haiku-20241022",
1598 "name": "Anthropic: Claude 3.5 Haiku (2024-10-22)",
1599 "cost_per_1m_in": 0.7999999999999999,
1600 "cost_per_1m_out": 4,
1601 "cost_per_1m_in_cached": 1,
1602 "cost_per_1m_out_cached": 0.08,
1603 "context_window": 200000,
1604 "default_max_tokens": 4096,
1605 "can_reason": false,
1606 "has_reasoning_efforts": false,
1607 "supports_attachments": true
1608 },
1609 {
1610 "id": "anthropic/claude-3.5-sonnet",
1611 "name": "Anthropic: Claude 3.5 Sonnet",
1612 "cost_per_1m_in": 3,
1613 "cost_per_1m_out": 15,
1614 "cost_per_1m_in_cached": 3.75,
1615 "cost_per_1m_out_cached": 0.3,
1616 "context_window": 200000,
1617 "default_max_tokens": 4096,
1618 "can_reason": false,
1619 "has_reasoning_efforts": false,
1620 "supports_attachments": true
1621 },
1622 {
1623 "id": "mistralai/ministral-8b",
1624 "name": "Mistral: Ministral 8B",
1625 "cost_per_1m_in": 0.09999999999999999,
1626 "cost_per_1m_out": 0.09999999999999999,
1627 "cost_per_1m_in_cached": 0,
1628 "cost_per_1m_out_cached": 0,
1629 "context_window": 128000,
1630 "default_max_tokens": 12800,
1631 "can_reason": false,
1632 "has_reasoning_efforts": false,
1633 "supports_attachments": false
1634 },
1635 {
1636 "id": "nvidia/llama-3.1-nemotron-70b-instruct",
1637 "name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct",
1638 "cost_per_1m_in": 0.12,
1639 "cost_per_1m_out": 0.3,
1640 "cost_per_1m_in_cached": 0,
1641 "cost_per_1m_out_cached": 0,
1642 "context_window": 131072,
1643 "default_max_tokens": 8192,
1644 "can_reason": false,
1645 "has_reasoning_efforts": false,
1646 "supports_attachments": false
1647 },
1648 {
1649 "id": "google/gemini-flash-1.5-8b",
1650 "name": "Google: Gemini 1.5 Flash 8B",
1651 "cost_per_1m_in": 0.0375,
1652 "cost_per_1m_out": 0.15,
1653 "cost_per_1m_in_cached": 0.0583,
1654 "cost_per_1m_out_cached": 0.01,
1655 "context_window": 1000000,
1656 "default_max_tokens": 4096,
1657 "can_reason": false,
1658 "has_reasoning_efforts": false,
1659 "supports_attachments": true
1660 },
1661 {
1662 "id": "thedrummer/rocinante-12b",
1663 "name": "TheDrummer: Rocinante 12B",
1664 "cost_per_1m_in": 0.16999999999999998,
1665 "cost_per_1m_out": 0.43,
1666 "cost_per_1m_in_cached": 0,
1667 "cost_per_1m_out_cached": 0,
1668 "context_window": 32768,
1669 "default_max_tokens": 3276,
1670 "can_reason": false,
1671 "has_reasoning_efforts": false,
1672 "supports_attachments": false
1673 },
1674 {
1675 "id": "meta-llama/llama-3.2-3b-instruct",
1676 "name": "Meta: Llama 3.2 3B Instruct",
1677 "cost_per_1m_in": 0.03,
1678 "cost_per_1m_out": 0.049999999999999996,
1679 "cost_per_1m_in_cached": 0,
1680 "cost_per_1m_out_cached": 0,
1681 "context_window": 32768,
1682 "default_max_tokens": 16000,
1683 "can_reason": false,
1684 "has_reasoning_efforts": false,
1685 "supports_attachments": false
1686 },
1687 {
1688 "id": "qwen/qwen-2.5-72b-instruct",
1689 "name": "Qwen2.5 72B Instruct",
1690 "cost_per_1m_in": 0.12,
1691 "cost_per_1m_out": 0.39,
1692 "cost_per_1m_in_cached": 0,
1693 "cost_per_1m_out_cached": 0,
1694 "context_window": 32768,
1695 "default_max_tokens": 8192,
1696 "can_reason": false,
1697 "has_reasoning_efforts": false,
1698 "supports_attachments": false
1699 },
1700 {
1701 "id": "cohere/command-r-plus-08-2024",
1702 "name": "Cohere: Command R+ (08-2024)",
1703 "cost_per_1m_in": 2.5,
1704 "cost_per_1m_out": 10,
1705 "cost_per_1m_in_cached": 0,
1706 "cost_per_1m_out_cached": 0,
1707 "context_window": 128000,
1708 "default_max_tokens": 2000,
1709 "can_reason": false,
1710 "has_reasoning_efforts": false,
1711 "supports_attachments": false
1712 },
1713 {
1714 "id": "cohere/command-r-08-2024",
1715 "name": "Cohere: Command R (08-2024)",
1716 "cost_per_1m_in": 0.15,
1717 "cost_per_1m_out": 0.6,
1718 "cost_per_1m_in_cached": 0,
1719 "cost_per_1m_out_cached": 0,
1720 "context_window": 128000,
1721 "default_max_tokens": 2000,
1722 "can_reason": false,
1723 "has_reasoning_efforts": false,
1724 "supports_attachments": false
1725 },
1726 {
1727 "id": "microsoft/phi-3.5-mini-128k-instruct",
1728 "name": "Microsoft: Phi-3.5 Mini 128K Instruct",
1729 "cost_per_1m_in": 0.09999999999999999,
1730 "cost_per_1m_out": 0.09999999999999999,
1731 "cost_per_1m_in_cached": 0,
1732 "cost_per_1m_out_cached": 0,
1733 "context_window": 128000,
1734 "default_max_tokens": 12800,
1735 "can_reason": false,
1736 "has_reasoning_efforts": false,
1737 "supports_attachments": false
1738 },
1739 {
1740 "id": "openai/gpt-4o-2024-08-06",
1741 "name": "OpenAI: GPT-4o (2024-08-06)",
1742 "cost_per_1m_in": 2.5,
1743 "cost_per_1m_out": 10,
1744 "cost_per_1m_in_cached": 0,
1745 "cost_per_1m_out_cached": 1.25,
1746 "context_window": 128000,
1747 "default_max_tokens": 8192,
1748 "can_reason": false,
1749 "has_reasoning_efforts": false,
1750 "supports_attachments": true
1751 },
1752 {
1753 "id": "meta-llama/llama-3.1-8b-instruct",
1754 "name": "Meta: Llama 3.1 8B Instruct",
1755 "cost_per_1m_in": 0.09999999999999999,
1756 "cost_per_1m_out": 0.09999999999999999,
1757 "cost_per_1m_in_cached": 0,
1758 "cost_per_1m_out_cached": 0,
1759 "context_window": 131072,
1760 "default_max_tokens": 4000,
1761 "can_reason": false,
1762 "has_reasoning_efforts": false,
1763 "supports_attachments": false
1764 },
1765 {
1766 "id": "meta-llama/llama-3.1-405b-instruct",
1767 "name": "Meta: Llama 3.1 405B Instruct",
1768 "cost_per_1m_in": 3,
1769 "cost_per_1m_out": 3,
1770 "cost_per_1m_in_cached": 0,
1771 "cost_per_1m_out_cached": 0,
1772 "context_window": 131072,
1773 "default_max_tokens": 13107,
1774 "can_reason": false,
1775 "has_reasoning_efforts": false,
1776 "supports_attachments": false
1777 },
1778 {
1779 "id": "meta-llama/llama-3.1-70b-instruct",
1780 "name": "Meta: Llama 3.1 70B Instruct",
1781 "cost_per_1m_in": 0.88,
1782 "cost_per_1m_out": 0.88,
1783 "cost_per_1m_in_cached": 0,
1784 "cost_per_1m_out_cached": 0,
1785 "context_window": 131072,
1786 "default_max_tokens": 13107,
1787 "can_reason": false,
1788 "has_reasoning_efforts": false,
1789 "supports_attachments": false
1790 },
1791 {
1792 "id": "mistralai/mistral-nemo",
1793 "name": "Mistral: Mistral Nemo",
1794 "cost_per_1m_in": 0.15,
1795 "cost_per_1m_out": 0.15,
1796 "cost_per_1m_in_cached": 0,
1797 "cost_per_1m_out_cached": 0,
1798 "context_window": 131072,
1799 "default_max_tokens": 13107,
1800 "can_reason": false,
1801 "has_reasoning_efforts": false,
1802 "supports_attachments": false
1803 },
1804 {
1805 "id": "openai/gpt-4o-mini-2024-07-18",
1806 "name": "OpenAI: GPT-4o-mini (2024-07-18)",
1807 "cost_per_1m_in": 0.15,
1808 "cost_per_1m_out": 0.6,
1809 "cost_per_1m_in_cached": 0,
1810 "cost_per_1m_out_cached": 0.075,
1811 "context_window": 128000,
1812 "default_max_tokens": 8192,
1813 "can_reason": false,
1814 "has_reasoning_efforts": false,
1815 "supports_attachments": true
1816 },
1817 {
1818 "id": "openai/gpt-4o-mini",
1819 "name": "OpenAI: GPT-4o-mini",
1820 "cost_per_1m_in": 0.15,
1821 "cost_per_1m_out": 0.6,
1822 "cost_per_1m_in_cached": 0,
1823 "cost_per_1m_out_cached": 0.075,
1824 "context_window": 128000,
1825 "default_max_tokens": 8192,
1826 "can_reason": false,
1827 "has_reasoning_efforts": false,
1828 "supports_attachments": true
1829 },
1830 {
1831 "id": "anthropic/claude-3.5-sonnet-20240620",
1832 "name": "Anthropic: Claude 3.5 Sonnet (2024-06-20)",
1833 "cost_per_1m_in": 3,
1834 "cost_per_1m_out": 15,
1835 "cost_per_1m_in_cached": 3.75,
1836 "cost_per_1m_out_cached": 0.3,
1837 "context_window": 200000,
1838 "default_max_tokens": 4096,
1839 "can_reason": false,
1840 "has_reasoning_efforts": false,
1841 "supports_attachments": true
1842 },
1843 {
1844 "id": "mistralai/mistral-7b-instruct-v0.3",
1845 "name": "Mistral: Mistral 7B Instruct v0.3",
1846 "cost_per_1m_in": 0.028,
1847 "cost_per_1m_out": 0.054,
1848 "cost_per_1m_in_cached": 0,
1849 "cost_per_1m_out_cached": 0,
1850 "context_window": 32768,
1851 "default_max_tokens": 8192,
1852 "can_reason": false,
1853 "has_reasoning_efforts": false,
1854 "supports_attachments": false
1855 },
1856 {
1857 "id": "mistralai/mistral-7b-instruct:free",
1858 "name": "Mistral: Mistral 7B Instruct (free)",
1859 "cost_per_1m_in": 0,
1860 "cost_per_1m_out": 0,
1861 "cost_per_1m_in_cached": 0,
1862 "cost_per_1m_out_cached": 0,
1863 "context_window": 32768,
1864 "default_max_tokens": 8192,
1865 "can_reason": false,
1866 "has_reasoning_efforts": false,
1867 "supports_attachments": false
1868 },
1869 {
1870 "id": "mistralai/mistral-7b-instruct",
1871 "name": "Mistral: Mistral 7B Instruct",
1872 "cost_per_1m_in": 0.028,
1873 "cost_per_1m_out": 0.054,
1874 "cost_per_1m_in_cached": 0,
1875 "cost_per_1m_out_cached": 0,
1876 "context_window": 32768,
1877 "default_max_tokens": 8192,
1878 "can_reason": false,
1879 "has_reasoning_efforts": false,
1880 "supports_attachments": false
1881 },
1882 {
1883 "id": "microsoft/phi-3-mini-128k-instruct",
1884 "name": "Microsoft: Phi-3 Mini 128K Instruct",
1885 "cost_per_1m_in": 0.09999999999999999,
1886 "cost_per_1m_out": 0.09999999999999999,
1887 "cost_per_1m_in_cached": 0,
1888 "cost_per_1m_out_cached": 0,
1889 "context_window": 128000,
1890 "default_max_tokens": 12800,
1891 "can_reason": false,
1892 "has_reasoning_efforts": false,
1893 "supports_attachments": false
1894 },
1895 {
1896 "id": "microsoft/phi-3-medium-128k-instruct",
1897 "name": "Microsoft: Phi-3 Medium 128K Instruct",
1898 "cost_per_1m_in": 1,
1899 "cost_per_1m_out": 1,
1900 "cost_per_1m_in_cached": 0,
1901 "cost_per_1m_out_cached": 0,
1902 "context_window": 128000,
1903 "default_max_tokens": 12800,
1904 "can_reason": false,
1905 "has_reasoning_efforts": false,
1906 "supports_attachments": false
1907 },
1908 {
1909 "id": "google/gemini-flash-1.5",
1910          "name": "Google: Gemini 1.5 Flash",
1911 "cost_per_1m_in": 0.075,
1912 "cost_per_1m_out": 0.3,
1913 "cost_per_1m_in_cached": 0.1583,
1914 "cost_per_1m_out_cached": 0.01875,
1915 "context_window": 1000000,
1916 "default_max_tokens": 4096,
1917 "can_reason": false,
1918 "has_reasoning_efforts": false,
1919 "supports_attachments": true
1920 },
1921 {
1922 "id": "openai/gpt-4o-2024-05-13",
1923 "name": "OpenAI: GPT-4o (2024-05-13)",
1924 "cost_per_1m_in": 5,
1925 "cost_per_1m_out": 15,
1926 "cost_per_1m_in_cached": 0,
1927 "cost_per_1m_out_cached": 0,
1928 "context_window": 128000,
1929 "default_max_tokens": 2048,
1930 "can_reason": false,
1931 "has_reasoning_efforts": false,
1932 "supports_attachments": true
1933 },
1934 {
1935 "id": "openai/gpt-4o",
1936 "name": "OpenAI: GPT-4o",
1937 "cost_per_1m_in": 2.5,
1938 "cost_per_1m_out": 10,
1939 "cost_per_1m_in_cached": 0,
1940 "cost_per_1m_out_cached": 0,
1941 "context_window": 128000,
1942 "default_max_tokens": 8192,
1943 "can_reason": false,
1944 "has_reasoning_efforts": false,
1945 "supports_attachments": true
1946 },
1947 {
1948 "id": "openai/gpt-4o:extended",
1949 "name": "OpenAI: GPT-4o (extended)",
1950 "cost_per_1m_in": 6,
1951 "cost_per_1m_out": 18,
1952 "cost_per_1m_in_cached": 0,
1953 "cost_per_1m_out_cached": 0,
1954 "context_window": 128000,
1955 "default_max_tokens": 32000,
1956 "can_reason": false,
1957 "has_reasoning_efforts": false,
1958 "supports_attachments": true
1959 },
1960 {
1961 "id": "meta-llama/llama-3-70b-instruct",
1962 "name": "Meta: Llama 3 70B Instruct",
1963 "cost_per_1m_in": 0.3,
1964 "cost_per_1m_out": 0.39999999999999997,
1965 "cost_per_1m_in_cached": 0,
1966 "cost_per_1m_out_cached": 0,
1967 "context_window": 8192,
1968 "default_max_tokens": 8192,
1969 "can_reason": false,
1970 "has_reasoning_efforts": false,
1971 "supports_attachments": false
1972 },
1973 {
1974 "id": "meta-llama/llama-3-8b-instruct",
1975 "name": "Meta: Llama 3 8B Instruct",
1976 "cost_per_1m_in": 0.03,
1977 "cost_per_1m_out": 0.06,
1978 "cost_per_1m_in_cached": 0,
1979 "cost_per_1m_out_cached": 0,
1980 "context_window": 8192,
1981 "default_max_tokens": 8192,
1982 "can_reason": false,
1983 "has_reasoning_efforts": false,
1984 "supports_attachments": false
1985 },
1986 {
1987 "id": "mistralai/mixtral-8x22b-instruct",
1988 "name": "Mistral: Mixtral 8x22B Instruct",
1989 "cost_per_1m_in": 2,
1990 "cost_per_1m_out": 6,
1991 "cost_per_1m_in_cached": 0,
1992 "cost_per_1m_out_cached": 0,
1993 "context_window": 65536,
1994 "default_max_tokens": 6553,
1995 "can_reason": false,
1996 "has_reasoning_efforts": false,
1997 "supports_attachments": false
1998 },
1999 {
2000 "id": "google/gemini-pro-1.5",
2001 "name": "Google: Gemini 1.5 Pro",
2002 "cost_per_1m_in": 1.25,
2003 "cost_per_1m_out": 5,
2004 "cost_per_1m_in_cached": 0,
2005 "cost_per_1m_out_cached": 0,
2006 "context_window": 2000000,
2007 "default_max_tokens": 4096,
2008 "can_reason": false,
2009 "has_reasoning_efforts": false,
2010 "supports_attachments": true
2011 },
2012 {
2013 "id": "openai/gpt-4-turbo",
2014 "name": "OpenAI: GPT-4 Turbo",
2015 "cost_per_1m_in": 10,
2016 "cost_per_1m_out": 30,
2017 "cost_per_1m_in_cached": 0,
2018 "cost_per_1m_out_cached": 0,
2019 "context_window": 128000,
2020 "default_max_tokens": 2048,
2021 "can_reason": false,
2022 "has_reasoning_efforts": false,
2023 "supports_attachments": true
2024 },
2025 {
2026 "id": "cohere/command-r-plus",
2027 "name": "Cohere: Command R+",
2028 "cost_per_1m_in": 3,
2029 "cost_per_1m_out": 15,
2030 "cost_per_1m_in_cached": 0,
2031 "cost_per_1m_out_cached": 0,
2032 "context_window": 128000,
2033 "default_max_tokens": 2000,
2034 "can_reason": false,
2035 "has_reasoning_efforts": false,
2036 "supports_attachments": false
2037 },
2038 {
2039 "id": "cohere/command-r-plus-04-2024",
2040 "name": "Cohere: Command R+ (04-2024)",
2041 "cost_per_1m_in": 3,
2042 "cost_per_1m_out": 15,
2043 "cost_per_1m_in_cached": 0,
2044 "cost_per_1m_out_cached": 0,
2045 "context_window": 128000,
2046 "default_max_tokens": 2000,
2047 "can_reason": false,
2048 "has_reasoning_efforts": false,
2049 "supports_attachments": false
2050 },
2051 {
2052 "id": "cohere/command-r",
2053 "name": "Cohere: Command R",
2054 "cost_per_1m_in": 0.5,
2055 "cost_per_1m_out": 1.5,
2056 "cost_per_1m_in_cached": 0,
2057 "cost_per_1m_out_cached": 0,
2058 "context_window": 128000,
2059 "default_max_tokens": 2000,
2060 "can_reason": false,
2061 "has_reasoning_efforts": false,
2062 "supports_attachments": false
2063 },
2064 {
2065 "id": "anthropic/claude-3-haiku",
2066 "name": "Anthropic: Claude 3 Haiku",
2067 "cost_per_1m_in": 0.25,
2068 "cost_per_1m_out": 1.25,
2069 "cost_per_1m_in_cached": 0.3,
2070 "cost_per_1m_out_cached": 0.03,
2071 "context_window": 200000,
2072 "default_max_tokens": 2048,
2073 "can_reason": false,
2074 "has_reasoning_efforts": false,
2075 "supports_attachments": true
2076 },
2077 {
2078 "id": "anthropic/claude-3-opus",
2079 "name": "Anthropic: Claude 3 Opus",
2080 "cost_per_1m_in": 15,
2081 "cost_per_1m_out": 75,
2082 "cost_per_1m_in_cached": 18.75,
2083 "cost_per_1m_out_cached": 1.5,
2084 "context_window": 200000,
2085 "default_max_tokens": 2048,
2086 "can_reason": false,
2087 "has_reasoning_efforts": false,
2088 "supports_attachments": true
2089 },
2090 {
2091 "id": "cohere/command-r-03-2024",
2092 "name": "Cohere: Command R (03-2024)",
2093 "cost_per_1m_in": 0.5,
2094 "cost_per_1m_out": 1.5,
2095 "cost_per_1m_in_cached": 0,
2096 "cost_per_1m_out_cached": 0,
2097 "context_window": 128000,
2098 "default_max_tokens": 2000,
2099 "can_reason": false,
2100 "has_reasoning_efforts": false,
2101 "supports_attachments": false
2102 },
2103 {
2104 "id": "mistralai/mistral-large",
2105 "name": "Mistral Large",
2106 "cost_per_1m_in": 2,
2107 "cost_per_1m_out": 6,
2108 "cost_per_1m_in_cached": 0,
2109 "cost_per_1m_out_cached": 0,
2110 "context_window": 128000,
2111 "default_max_tokens": 12800,
2112 "can_reason": false,
2113 "has_reasoning_efforts": false,
2114 "supports_attachments": false
2115 },
2116 {
2117 "id": "openai/gpt-4-turbo-preview",
2118 "name": "OpenAI: GPT-4 Turbo Preview",
2119 "cost_per_1m_in": 10,
2120 "cost_per_1m_out": 30,
2121 "cost_per_1m_in_cached": 0,
2122 "cost_per_1m_out_cached": 0,
2123 "context_window": 128000,
2124 "default_max_tokens": 2048,
2125 "can_reason": false,
2126 "has_reasoning_efforts": false,
2127 "supports_attachments": false
2128 },
2129 {
2130 "id": "openai/gpt-3.5-turbo-0613",
2131 "name": "OpenAI: GPT-3.5 Turbo (older v0613)",
2132 "cost_per_1m_in": 1,
2133 "cost_per_1m_out": 2,
2134 "cost_per_1m_in_cached": 0,
2135 "cost_per_1m_out_cached": 0,
2136 "context_window": 4095,
2137 "default_max_tokens": 2048,
2138 "can_reason": false,
2139 "has_reasoning_efforts": false,
2140 "supports_attachments": false
2141 },
2142 {
2143 "id": "mistralai/mistral-tiny",
2144 "name": "Mistral Tiny",
2145 "cost_per_1m_in": 0.25,
2146 "cost_per_1m_out": 0.25,
2147 "cost_per_1m_in_cached": 0,
2148 "cost_per_1m_out_cached": 0,
2149 "context_window": 32768,
2150 "default_max_tokens": 3276,
2151 "can_reason": false,
2152 "has_reasoning_efforts": false,
2153 "supports_attachments": false
2154 },
2155 {
2156 "id": "mistralai/mistral-small",
2157 "name": "Mistral Small",
2158 "cost_per_1m_in": 0.19999999999999998,
2159 "cost_per_1m_out": 0.6,
2160 "cost_per_1m_in_cached": 0,
2161 "cost_per_1m_out_cached": 0,
2162 "context_window": 32768,
2163 "default_max_tokens": 3276,
2164 "can_reason": false,
2165 "has_reasoning_efforts": false,
2166 "supports_attachments": false
2167 },
2168 {
2169 "id": "mistralai/mixtral-8x7b-instruct",
2170 "name": "Mistral: Mixtral 8x7B Instruct",
2171 "cost_per_1m_in": 0.08,
2172 "cost_per_1m_out": 0.24,
2173 "cost_per_1m_in_cached": 0,
2174 "cost_per_1m_out_cached": 0,
2175 "context_window": 32768,
2176 "default_max_tokens": 8192,
2177 "can_reason": false,
2178 "has_reasoning_efforts": false,
2179 "supports_attachments": false
2180 },
2181 {
2182 "id": "openai/gpt-4-1106-preview",
2183 "name": "OpenAI: GPT-4 Turbo (older v1106)",
2184 "cost_per_1m_in": 10,
2185 "cost_per_1m_out": 30,
2186 "cost_per_1m_in_cached": 0,
2187 "cost_per_1m_out_cached": 0,
2188 "context_window": 128000,
2189 "default_max_tokens": 2048,
2190 "can_reason": false,
2191 "has_reasoning_efforts": false,
2192 "supports_attachments": false
2193 },
2194 {
2195 "id": "openai/gpt-3.5-turbo-16k",
2196 "name": "OpenAI: GPT-3.5 Turbo 16k",
2197 "cost_per_1m_in": 3,
2198 "cost_per_1m_out": 4,
2199 "cost_per_1m_in_cached": 0,
2200 "cost_per_1m_out_cached": 0,
2201 "context_window": 16385,
2202 "default_max_tokens": 2048,
2203 "can_reason": false,
2204 "has_reasoning_efforts": false,
2205 "supports_attachments": false
2206 },
2207 {
2208 "id": "openai/gpt-3.5-turbo",
2209 "name": "OpenAI: GPT-3.5 Turbo",
2210 "cost_per_1m_in": 0.5,
2211 "cost_per_1m_out": 1.5,
2212 "cost_per_1m_in_cached": 0,
2213 "cost_per_1m_out_cached": 0,
2214 "context_window": 16385,
2215 "default_max_tokens": 2048,
2216 "can_reason": false,
2217 "has_reasoning_efforts": false,
2218 "supports_attachments": false
2219 },
2220 {
2221 "id": "openai/gpt-4-0314",
2222 "name": "OpenAI: GPT-4 (older v0314)",
2223 "cost_per_1m_in": 30,
2224 "cost_per_1m_out": 60,
2225 "cost_per_1m_in_cached": 0,
2226 "cost_per_1m_out_cached": 0,
2227 "context_window": 8191,
2228 "default_max_tokens": 2048,
2229 "can_reason": false,
2230 "has_reasoning_efforts": false,
2231 "supports_attachments": false
2232 },
2233 {
2234 "id": "openai/gpt-4",
2235 "name": "OpenAI: GPT-4",
2236 "cost_per_1m_in": 30,
2237 "cost_per_1m_out": 60,
2238 "cost_per_1m_in_cached": 0,
2239 "cost_per_1m_out_cached": 0,
2240 "context_window": 8191,
2241 "default_max_tokens": 2048,
2242 "can_reason": false,
2243 "has_reasoning_efforts": false,
2244 "supports_attachments": false
2245 }
2246 ],
2247 "default_headers": {
2248 "HTTP-Referer": "https://charm.land",
2249 "X-Title": "Crush"
2250 }
2251}