1{
2 "name": "Vercel",
3 "id": "vercel",
4 "api_key": "$VERCEL_API_KEY",
5 "api_endpoint": "https://ai-gateway.vercel.sh/v1",
6 "type": "vercel",
7 "default_large_model_id": "anthropic/claude-sonnet-4",
8 "default_small_model_id": "anthropic/claude-haiku-4.5",
9 "models": [
10 {
11 "id": "anthropic/claude-3-haiku",
12 "name": "Claude 3 Haiku",
13 "cost_per_1m_in": 0.25,
14 "cost_per_1m_out": 1.25,
15 "cost_per_1m_in_cached": 0.03,
16 "cost_per_1m_out_cached": 0.3,
17 "context_window": 200000,
18 "default_max_tokens": 4096,
19 "can_reason": false,
20 "supports_attachments": true,
21 "options": {}
22 },
23 {
24 "id": "anthropic/claude-3.5-haiku",
25 "name": "Claude 3.5 Haiku",
26 "cost_per_1m_in": 0.8,
27 "cost_per_1m_out": 4,
28 "cost_per_1m_in_cached": 0.08,
29 "cost_per_1m_out_cached": 1,
30 "context_window": 200000,
31 "default_max_tokens": 8000,
32 "can_reason": false,
33 "supports_attachments": true,
34 "options": {}
35 },
36 {
37 "id": "anthropic/claude-3.5-sonnet",
38 "name": "Claude 3.5 Sonnet",
39 "cost_per_1m_in": 3,
40 "cost_per_1m_out": 15,
41 "cost_per_1m_in_cached": 0.3,
42 "cost_per_1m_out_cached": 3.75,
43 "context_window": 200000,
44 "default_max_tokens": 8000,
45 "can_reason": false,
46 "supports_attachments": true,
47 "options": {}
48 },
49 {
50 "id": "anthropic/claude-3.5-sonnet-20240620",
51 "name": "Claude 3.5 Sonnet (2024-06-20)",
52 "cost_per_1m_in": 3,
53 "cost_per_1m_out": 15,
54 "cost_per_1m_in_cached": 0,
55 "cost_per_1m_out_cached": 0,
56 "context_window": 200000,
57 "default_max_tokens": 8000,
58 "can_reason": false,
59 "supports_attachments": true,
60 "options": {}
61 },
62 {
63 "id": "anthropic/claude-3.7-sonnet",
64 "name": "Claude 3.7 Sonnet",
65 "cost_per_1m_in": 3,
66 "cost_per_1m_out": 15,
67 "cost_per_1m_in_cached": 0.3,
68 "cost_per_1m_out_cached": 3.75,
69 "context_window": 200000,
70 "default_max_tokens": 8000,
71 "can_reason": true,
72 "reasoning_levels": [
73 "none",
74 "minimal",
75 "low",
76 "medium",
77 "high",
78 "xhigh"
79 ],
80 "default_reasoning_effort": "medium",
81 "supports_attachments": true,
82 "options": {}
83 },
84 {
85 "id": "anthropic/claude-haiku-4.5",
86 "name": "Claude Haiku 4.5",
87 "cost_per_1m_in": 1,
88 "cost_per_1m_out": 5,
89 "cost_per_1m_in_cached": 0.1,
90 "cost_per_1m_out_cached": 1.25,
91 "context_window": 200000,
92 "default_max_tokens": 8000,
93 "can_reason": true,
94 "reasoning_levels": [
95 "none",
96 "minimal",
97 "low",
98 "medium",
99 "high",
100 "xhigh"
101 ],
102 "default_reasoning_effort": "medium",
103 "supports_attachments": true,
104 "options": {}
105 },
106 {
107 "id": "anthropic/claude-opus-4",
108 "name": "Claude Opus 4",
109 "cost_per_1m_in": 15,
110 "cost_per_1m_out": 75,
111 "cost_per_1m_in_cached": 1.5,
112 "cost_per_1m_out_cached": 18.75,
113 "context_window": 200000,
114 "default_max_tokens": 8000,
115 "can_reason": true,
116 "reasoning_levels": [
117 "none",
118 "minimal",
119 "low",
120 "medium",
121 "high",
122 "xhigh"
123 ],
124 "default_reasoning_effort": "medium",
125 "supports_attachments": true,
126 "options": {}
127 },
128 {
129 "id": "anthropic/claude-opus-4.1",
130 "name": "Claude Opus 4.1",
131 "cost_per_1m_in": 15,
132 "cost_per_1m_out": 75,
133 "cost_per_1m_in_cached": 1.5,
134 "cost_per_1m_out_cached": 18.75,
135 "context_window": 200000,
136 "default_max_tokens": 8000,
137 "can_reason": true,
138 "reasoning_levels": [
139 "none",
140 "minimal",
141 "low",
142 "medium",
143 "high",
144 "xhigh"
145 ],
146 "default_reasoning_effort": "medium",
147 "supports_attachments": true,
148 "options": {}
149 },
150 {
151 "id": "anthropic/claude-opus-4.5",
152 "name": "Claude Opus 4.5",
153 "cost_per_1m_in": 5,
154 "cost_per_1m_out": 25,
155 "cost_per_1m_in_cached": 0.5,
156 "cost_per_1m_out_cached": 6.25,
157 "context_window": 200000,
158 "default_max_tokens": 8000,
159 "can_reason": true,
160 "reasoning_levels": [
161 "none",
162 "minimal",
163 "low",
164 "medium",
165 "high",
166 "xhigh"
167 ],
168 "default_reasoning_effort": "medium",
169 "supports_attachments": true,
170 "options": {}
171 },
172 {
173 "id": "anthropic/claude-opus-4.6",
174 "name": "Claude Opus 4.6",
175 "cost_per_1m_in": 5,
176 "cost_per_1m_out": 25,
177 "cost_per_1m_in_cached": 0.5,
178 "cost_per_1m_out_cached": 6.25,
179 "context_window": 1000000,
180 "default_max_tokens": 8000,
181 "can_reason": true,
182 "reasoning_levels": [
183 "none",
184 "minimal",
185 "low",
186 "medium",
187 "high",
188 "xhigh"
189 ],
190 "default_reasoning_effort": "medium",
191 "supports_attachments": true,
192 "options": {}
193 },
194 {
195 "id": "anthropic/claude-sonnet-4",
196 "name": "Claude Sonnet 4",
197 "cost_per_1m_in": 3,
198 "cost_per_1m_out": 15,
199 "cost_per_1m_in_cached": 0.3,
200 "cost_per_1m_out_cached": 3.75,
201 "context_window": 1000000,
202 "default_max_tokens": 8000,
203 "can_reason": true,
204 "reasoning_levels": [
205 "none",
206 "minimal",
207 "low",
208 "medium",
209 "high",
210 "xhigh"
211 ],
212 "default_reasoning_effort": "medium",
213 "supports_attachments": true,
214 "options": {}
215 },
216 {
217 "id": "anthropic/claude-sonnet-4.5",
218 "name": "Claude Sonnet 4.5",
219 "cost_per_1m_in": 3,
220 "cost_per_1m_out": 15,
221 "cost_per_1m_in_cached": 0.3,
222 "cost_per_1m_out_cached": 3.75,
223 "context_window": 1000000,
224 "default_max_tokens": 8000,
225 "can_reason": true,
226 "reasoning_levels": [
227 "none",
228 "minimal",
229 "low",
230 "medium",
231 "high",
232 "xhigh"
233 ],
234 "default_reasoning_effort": "medium",
235 "supports_attachments": true,
236 "options": {}
237 },
238 {
239 "id": "anthropic/claude-sonnet-4.6",
240 "name": "Claude Sonnet 4.6",
241 "cost_per_1m_in": 3,
242 "cost_per_1m_out": 15,
243 "cost_per_1m_in_cached": 0.3,
244 "cost_per_1m_out_cached": 3.75,
245 "context_window": 1000000,
246 "default_max_tokens": 8000,
247 "can_reason": true,
248 "reasoning_levels": [
249 "none",
250 "minimal",
251 "low",
252 "medium",
253 "high",
254 "xhigh"
255 ],
256 "default_reasoning_effort": "medium",
257 "supports_attachments": true,
258 "options": {}
259 },
260 {
261 "id": "cohere/command-a",
262 "name": "Command A",
263 "cost_per_1m_in": 2.5,
264 "cost_per_1m_out": 10,
265 "cost_per_1m_in_cached": 0,
266 "cost_per_1m_out_cached": 0,
267 "context_window": 256000,
268 "default_max_tokens": 8000,
269 "can_reason": false,
270 "supports_attachments": false,
271 "options": {}
272 },
273 {
274 "id": "deepseek/deepseek-v3",
275 "name": "DeepSeek V3 0324",
276 "cost_per_1m_in": 0.77,
277 "cost_per_1m_out": 0.77,
278 "cost_per_1m_in_cached": 0,
279 "cost_per_1m_out_cached": 0,
280 "context_window": 163840,
281 "default_max_tokens": 8000,
282 "can_reason": false,
283 "supports_attachments": false,
284 "options": {}
285 },
286 {
287 "id": "deepseek/deepseek-v3.1-terminus",
288 "name": "DeepSeek V3.1 Terminus",
289 "cost_per_1m_in": 0.27,
290 "cost_per_1m_out": 1,
291 "cost_per_1m_in_cached": 0,
292 "cost_per_1m_out_cached": 0,
293 "context_window": 131072,
294 "default_max_tokens": 8000,
295 "can_reason": true,
296 "reasoning_levels": [
297 "low",
298 "medium",
299 "high"
300 ],
301 "default_reasoning_effort": "medium",
302 "supports_attachments": false,
303 "options": {}
304 },
305 {
306 "id": "deepseek/deepseek-v3.2",
307 "name": "DeepSeek V3.2",
308 "cost_per_1m_in": 0.26,
309 "cost_per_1m_out": 0.38,
310 "cost_per_1m_in_cached": 0.13,
311 "cost_per_1m_out_cached": 0,
312 "context_window": 128000,
313 "default_max_tokens": 8000,
314 "can_reason": false,
315 "supports_attachments": false,
316 "options": {}
317 },
318 {
319 "id": "deepseek/deepseek-v3.2-thinking",
320 "name": "DeepSeek V3.2 Thinking",
321 "cost_per_1m_in": 0.28,
322 "cost_per_1m_out": 0.42,
323 "cost_per_1m_in_cached": 0.028,
324 "cost_per_1m_out_cached": 0,
325 "context_window": 128000,
326 "default_max_tokens": 8000,
327 "can_reason": true,
328 "reasoning_levels": [
329 "low",
330 "medium",
331 "high"
332 ],
333 "default_reasoning_effort": "medium",
334 "supports_attachments": false,
335 "options": {}
336 },
337 {
338 "id": "deepseek/deepseek-v3.1",
339 "name": "DeepSeek-V3.1",
340 "cost_per_1m_in": 0.21,
341 "cost_per_1m_out": 0.79,
342 "cost_per_1m_in_cached": 0,
343 "cost_per_1m_out_cached": 0,
344 "context_window": 163840,
345 "default_max_tokens": 8000,
346 "can_reason": true,
347 "reasoning_levels": [
348 "low",
349 "medium",
350 "high"
351 ],
352 "default_reasoning_effort": "medium",
353 "supports_attachments": false,
354 "options": {}
355 },
356 {
357 "id": "mistral/devstral-2",
358 "name": "Devstral 2",
359 "cost_per_1m_in": 0,
360 "cost_per_1m_out": 0,
361 "cost_per_1m_in_cached": 0,
362 "cost_per_1m_out_cached": 0,
363 "context_window": 256000,
364 "default_max_tokens": 8000,
365 "can_reason": false,
366 "supports_attachments": false,
367 "options": {}
368 },
369 {
370 "id": "mistral/devstral-small",
371 "name": "Devstral Small 1.1",
372 "cost_per_1m_in": 0.1,
373 "cost_per_1m_out": 0.3,
374 "cost_per_1m_in_cached": 0,
375 "cost_per_1m_out_cached": 0,
376 "context_window": 128000,
377 "default_max_tokens": 8000,
378 "can_reason": false,
379 "supports_attachments": false,
380 "options": {}
381 },
382 {
383 "id": "mistral/devstral-small-2",
384 "name": "Devstral Small 2",
385 "cost_per_1m_in": 0,
386 "cost_per_1m_out": 0,
387 "cost_per_1m_in_cached": 0,
388 "cost_per_1m_out_cached": 0,
389 "context_window": 256000,
390 "default_max_tokens": 8000,
391 "can_reason": false,
392 "supports_attachments": false,
393 "options": {}
394 },
395 {
396 "id": "zai/glm-4.5-air",
397 "name": "GLM 4.5 Air",
398 "cost_per_1m_in": 0.2,
399 "cost_per_1m_out": 1.1,
400 "cost_per_1m_in_cached": 0.03,
401 "cost_per_1m_out_cached": 0,
402 "context_window": 128000,
403 "default_max_tokens": 8000,
404 "can_reason": true,
405 "reasoning_levels": [
406 "low",
407 "medium",
408 "high"
409 ],
410 "default_reasoning_effort": "medium",
411 "supports_attachments": false,
412 "options": {}
413 },
414 {
415 "id": "zai/glm-4.5v",
416 "name": "GLM 4.5V",
417 "cost_per_1m_in": 0.6,
418 "cost_per_1m_out": 1.8,
419 "cost_per_1m_in_cached": 0,
420 "cost_per_1m_out_cached": 0,
421 "context_window": 65536,
422 "default_max_tokens": 8000,
423 "can_reason": true,
424 "reasoning_levels": [
425 "low",
426 "medium",
427 "high"
428 ],
429 "default_reasoning_effort": "medium",
430 "supports_attachments": true,
431 "options": {}
432 },
433 {
434 "id": "zai/glm-4.6",
435 "name": "GLM 4.6",
436 "cost_per_1m_in": 0.45,
437 "cost_per_1m_out": 1.8,
438 "cost_per_1m_in_cached": 0.11,
439 "cost_per_1m_out_cached": 0,
440 "context_window": 200000,
441 "default_max_tokens": 8000,
442 "can_reason": true,
443 "reasoning_levels": [
444 "low",
445 "medium",
446 "high"
447 ],
448 "default_reasoning_effort": "medium",
449 "supports_attachments": false,
450 "options": {}
451 },
452 {
453 "id": "zai/glm-4.7",
454 "name": "GLM 4.7",
455 "cost_per_1m_in": 0.43,
456 "cost_per_1m_out": 1.75,
457 "cost_per_1m_in_cached": 0.08,
458 "cost_per_1m_out_cached": 0,
459 "context_window": 202752,
460 "default_max_tokens": 8000,
461 "can_reason": true,
462 "reasoning_levels": [
463 "low",
464 "medium",
465 "high"
466 ],
467 "default_reasoning_effort": "medium",
468 "supports_attachments": false,
469 "options": {}
470 },
471 {
472 "id": "zai/glm-4.7-flash",
473 "name": "GLM 4.7 Flash",
474 "cost_per_1m_in": 0.07,
475 "cost_per_1m_out": 0.4,
476 "cost_per_1m_in_cached": 0,
477 "cost_per_1m_out_cached": 0,
478 "context_window": 200000,
479 "default_max_tokens": 8000,
480 "can_reason": true,
481 "reasoning_levels": [
482 "low",
483 "medium",
484 "high"
485 ],
486 "default_reasoning_effort": "medium",
487 "supports_attachments": false,
488 "options": {}
489 },
490 {
491 "id": "zai/glm-4.7-flashx",
492 "name": "GLM 4.7 FlashX",
493 "cost_per_1m_in": 0.06,
494 "cost_per_1m_out": 0.4,
495 "cost_per_1m_in_cached": 0.01,
496 "cost_per_1m_out_cached": 0,
497 "context_window": 200000,
498 "default_max_tokens": 8000,
499 "can_reason": true,
500 "reasoning_levels": [
501 "low",
502 "medium",
503 "high"
504 ],
505 "default_reasoning_effort": "medium",
506 "supports_attachments": false,
507 "options": {}
508 },
509 {
510 "id": "zai/glm-5",
511 "name": "GLM 5",
512 "cost_per_1m_in": 0.8,
513 "cost_per_1m_out": 2.56,
514 "cost_per_1m_in_cached": 0.16,
515 "cost_per_1m_out_cached": 0,
516 "context_window": 202800,
517 "default_max_tokens": 8000,
518 "can_reason": true,
519 "reasoning_levels": [
520 "low",
521 "medium",
522 "high"
523 ],
524 "default_reasoning_effort": "medium",
525 "supports_attachments": false,
526 "options": {}
527 },
528 {
529 "id": "zai/glm-4.5",
530 "name": "GLM-4.5",
531 "cost_per_1m_in": 0.6,
532 "cost_per_1m_out": 2.2,
533 "cost_per_1m_in_cached": 0,
534 "cost_per_1m_out_cached": 0,
535 "context_window": 131072,
536 "default_max_tokens": 8000,
537 "can_reason": true,
538 "reasoning_levels": [
539 "low",
540 "medium",
541 "high"
542 ],
543 "default_reasoning_effort": "medium",
544 "supports_attachments": false,
545 "options": {}
546 },
547 {
548 "id": "zai/glm-4.6v",
549 "name": "GLM-4.6V",
550 "cost_per_1m_in": 0.3,
551 "cost_per_1m_out": 0.9,
552 "cost_per_1m_in_cached": 0.05,
553 "cost_per_1m_out_cached": 0,
554 "context_window": 128000,
555 "default_max_tokens": 8000,
556 "can_reason": true,
557 "reasoning_levels": [
558 "low",
559 "medium",
560 "high"
561 ],
562 "default_reasoning_effort": "medium",
563 "supports_attachments": true,
564 "options": {}
565 },
566 {
567 "id": "zai/glm-4.6v-flash",
568 "name": "GLM-4.6V-Flash",
569 "cost_per_1m_in": 0,
570 "cost_per_1m_out": 0,
571 "cost_per_1m_in_cached": 0,
572 "cost_per_1m_out_cached": 0,
573 "context_window": 128000,
574 "default_max_tokens": 8000,
575 "can_reason": true,
576 "reasoning_levels": [
577 "low",
578 "medium",
579 "high"
580 ],
581 "default_reasoning_effort": "medium",
582 "supports_attachments": true,
583 "options": {}
584 },
585 {
586 "id": "openai/gpt-5-chat",
587 "name": "GPT 5 Chat",
588 "cost_per_1m_in": 1.25,
589 "cost_per_1m_out": 10,
590 "cost_per_1m_in_cached": 0.125,
591 "cost_per_1m_out_cached": 0,
592 "context_window": 128000,
593 "default_max_tokens": 8000,
594 "can_reason": true,
595 "reasoning_levels": [
596 "low",
597 "medium",
598 "high"
599 ],
600 "default_reasoning_effort": "medium",
601 "supports_attachments": true,
602 "options": {}
603 },
604 {
605 "id": "openai/gpt-5.1-codex-max",
606 "name": "GPT 5.1 Codex Max",
607 "cost_per_1m_in": 1.25,
608 "cost_per_1m_out": 10,
609 "cost_per_1m_in_cached": 0.125,
610 "cost_per_1m_out_cached": 0,
611 "context_window": 400000,
612 "default_max_tokens": 8000,
613 "can_reason": true,
614 "reasoning_levels": [
615 "low",
616 "medium",
617 "high"
618 ],
619 "default_reasoning_effort": "medium",
620 "supports_attachments": true,
621 "options": {}
622 },
623 {
624 "id": "openai/gpt-5.1-codex-mini",
625 "name": "GPT 5.1 Codex Mini",
626 "cost_per_1m_in": 0.25,
627 "cost_per_1m_out": 2,
628 "cost_per_1m_in_cached": 0.025,
629 "cost_per_1m_out_cached": 0,
630 "context_window": 400000,
631 "default_max_tokens": 8000,
632 "can_reason": true,
633 "reasoning_levels": [
634 "low",
635 "medium",
636 "high"
637 ],
638 "default_reasoning_effort": "medium",
639 "supports_attachments": true,
640 "options": {}
641 },
642 {
643 "id": "openai/gpt-5.1-thinking",
644 "name": "GPT 5.1 Thinking",
645 "cost_per_1m_in": 1.25,
646 "cost_per_1m_out": 10,
647 "cost_per_1m_in_cached": 0.13,
648 "cost_per_1m_out_cached": 0,
649 "context_window": 400000,
650 "default_max_tokens": 8000,
651 "can_reason": true,
652 "reasoning_levels": [
653 "low",
654 "medium",
655 "high"
656 ],
657 "default_reasoning_effort": "medium",
658 "supports_attachments": true,
659 "options": {}
660 },
661 {
662 "id": "openai/gpt-5.2",
663 "name": "GPT 5.2",
664 "cost_per_1m_in": 1.75,
665 "cost_per_1m_out": 14,
666 "cost_per_1m_in_cached": 0.18,
667 "cost_per_1m_out_cached": 0,
668 "context_window": 400000,
669 "default_max_tokens": 8000,
670 "can_reason": true,
671 "reasoning_levels": [
672 "low",
673 "medium",
674 "high"
675 ],
676 "default_reasoning_effort": "medium",
677 "supports_attachments": true,
678 "options": {}
679 },
680 {
681 "id": "openai/gpt-5.2-pro",
682 "name": "GPT 5.2 Pro",
683 "cost_per_1m_in": 21,
684 "cost_per_1m_out": 168,
685 "cost_per_1m_in_cached": 0,
686 "cost_per_1m_out_cached": 0,
687 "context_window": 400000,
688 "default_max_tokens": 8000,
689 "can_reason": true,
690 "reasoning_levels": [
691 "low",
692 "medium",
693 "high"
694 ],
695 "default_reasoning_effort": "medium",
696 "supports_attachments": true,
697 "options": {}
698 },
699 {
700 "id": "openai/gpt-5.2-chat",
701 "name": "GPT 5.2 Chat",
702 "cost_per_1m_in": 1.75,
703 "cost_per_1m_out": 14,
704 "cost_per_1m_in_cached": 0.175,
705 "cost_per_1m_out_cached": 0,
706 "context_window": 128000,
707 "default_max_tokens": 8000,
708 "can_reason": true,
709 "reasoning_levels": [
710 "low",
711 "medium",
712 "high"
713 ],
714 "default_reasoning_effort": "medium",
715 "supports_attachments": true,
716 "options": {}
717 },
718 {
719 "id": "openai/gpt-5.2-codex",
720 "name": "GPT 5.2 Codex",
721 "cost_per_1m_in": 1.75,
722 "cost_per_1m_out": 14,
723 "cost_per_1m_in_cached": 0.175,
724 "cost_per_1m_out_cached": 0,
725 "context_window": 400000,
726 "default_max_tokens": 8000,
727 "can_reason": true,
728 "reasoning_levels": [
729 "low",
730 "medium",
731 "high"
732 ],
733 "default_reasoning_effort": "medium",
734 "supports_attachments": true,
735 "options": {}
736 },
737 {
738 "id": "openai/gpt-5.3-codex",
739 "name": "GPT 5.3 Codex",
740 "cost_per_1m_in": 1.75,
741 "cost_per_1m_out": 14,
742 "cost_per_1m_in_cached": 0.175,
743 "cost_per_1m_out_cached": 0,
744 "context_window": 400000,
745 "default_max_tokens": 8000,
746 "can_reason": true,
747 "reasoning_levels": [
748 "low",
749 "medium",
750 "high"
751 ],
752 "default_reasoning_effort": "medium",
753 "supports_attachments": true,
754 "options": {}
755 },
756 {
757 "id": "openai/gpt-5.4",
758 "name": "GPT 5.4",
759 "cost_per_1m_in": 2.5,
760 "cost_per_1m_out": 15,
761 "cost_per_1m_in_cached": 0.25,
762 "cost_per_1m_out_cached": 0,
763 "context_window": 200000,
764 "default_max_tokens": 8000,
765 "can_reason": true,
766 "reasoning_levels": [
767 "low",
768 "medium",
769 "high"
770 ],
771 "default_reasoning_effort": "medium",
772 "supports_attachments": true,
773 "options": {}
774 },
775 {
776 "id": "openai/gpt-5.4-pro",
777 "name": "GPT 5.4 Pro",
778 "cost_per_1m_in": 30,
779 "cost_per_1m_out": 180,
780 "cost_per_1m_in_cached": 0,
781 "cost_per_1m_out_cached": 0,
782 "context_window": 200000,
783 "default_max_tokens": 8000,
784 "can_reason": true,
785 "reasoning_levels": [
786 "low",
787 "medium",
788 "high"
789 ],
790 "default_reasoning_effort": "medium",
791 "supports_attachments": true,
792 "options": {}
793 },
794 {
795 "id": "openai/gpt-4-turbo",
796 "name": "GPT-4 Turbo",
797 "cost_per_1m_in": 10,
798 "cost_per_1m_out": 30,
799 "cost_per_1m_in_cached": 0,
800 "cost_per_1m_out_cached": 0,
801 "context_window": 128000,
802 "default_max_tokens": 4096,
803 "can_reason": false,
804 "supports_attachments": true,
805 "options": {}
806 },
807 {
808 "id": "openai/gpt-4.1",
809 "name": "GPT-4.1",
810 "cost_per_1m_in": 2,
811 "cost_per_1m_out": 8,
812 "cost_per_1m_in_cached": 0.5,
813 "cost_per_1m_out_cached": 0,
814 "context_window": 1047576,
815 "default_max_tokens": 8000,
816 "can_reason": false,
817 "supports_attachments": true,
818 "options": {}
819 },
820 {
821 "id": "openai/gpt-4.1-mini",
822 "name": "GPT-4.1 mini",
823 "cost_per_1m_in": 0.4,
824 "cost_per_1m_out": 1.6,
825 "cost_per_1m_in_cached": 0.1,
826 "cost_per_1m_out_cached": 0,
827 "context_window": 1047576,
828 "default_max_tokens": 8000,
829 "can_reason": false,
830 "supports_attachments": true,
831 "options": {}
832 },
833 {
834 "id": "openai/gpt-4.1-nano",
835 "name": "GPT-4.1 nano",
836 "cost_per_1m_in": 0.1,
837 "cost_per_1m_out": 0.4,
838 "cost_per_1m_in_cached": 0.03,
839 "cost_per_1m_out_cached": 0,
840 "context_window": 1047576,
841 "default_max_tokens": 8000,
842 "can_reason": false,
843 "supports_attachments": true,
844 "options": {}
845 },
846 {
847 "id": "openai/gpt-4o",
848 "name": "GPT-4o",
849 "cost_per_1m_in": 2.5,
850 "cost_per_1m_out": 10,
851 "cost_per_1m_in_cached": 1.25,
852 "cost_per_1m_out_cached": 0,
853 "context_window": 128000,
854 "default_max_tokens": 8000,
855 "can_reason": false,
856 "supports_attachments": true,
857 "options": {}
858 },
859 {
860 "id": "openai/gpt-4o-mini",
861 "name": "GPT-4o mini",
862 "cost_per_1m_in": 0.15,
863 "cost_per_1m_out": 0.6,
864 "cost_per_1m_in_cached": 0.075,
865 "cost_per_1m_out_cached": 0,
866 "context_window": 128000,
867 "default_max_tokens": 8000,
868 "can_reason": false,
869 "supports_attachments": true,
870 "options": {}
871 },
872 {
873 "id": "openai/gpt-5",
874 "name": "GPT-5",
875 "cost_per_1m_in": 1.25,
876 "cost_per_1m_out": 10,
877 "cost_per_1m_in_cached": 0.13,
878 "cost_per_1m_out_cached": 0,
879 "context_window": 400000,
880 "default_max_tokens": 8000,
881 "can_reason": true,
882 "reasoning_levels": [
883 "low",
884 "medium",
885 "high"
886 ],
887 "default_reasoning_effort": "medium",
888 "supports_attachments": true,
889 "options": {}
890 },
891 {
892 "id": "openai/gpt-5-mini",
893 "name": "GPT-5 mini",
894 "cost_per_1m_in": 0.25,
895 "cost_per_1m_out": 2,
896 "cost_per_1m_in_cached": 0.03,
897 "cost_per_1m_out_cached": 0,
898 "context_window": 400000,
899 "default_max_tokens": 8000,
900 "can_reason": true,
901 "reasoning_levels": [
902 "low",
903 "medium",
904 "high"
905 ],
906 "default_reasoning_effort": "medium",
907 "supports_attachments": true,
908 "options": {}
909 },
910 {
911 "id": "openai/gpt-5-nano",
912 "name": "GPT-5 nano",
913 "cost_per_1m_in": 0.05,
914 "cost_per_1m_out": 0.4,
915 "cost_per_1m_in_cached": 0.01,
916 "cost_per_1m_out_cached": 0,
917 "context_window": 400000,
918 "default_max_tokens": 8000,
919 "can_reason": true,
920 "reasoning_levels": [
921 "low",
922 "medium",
923 "high"
924 ],
925 "default_reasoning_effort": "medium",
926 "supports_attachments": true,
927 "options": {}
928 },
929 {
930 "id": "openai/gpt-5-pro",
931 "name": "GPT-5 pro",
932 "cost_per_1m_in": 15,
933 "cost_per_1m_out": 120,
934 "cost_per_1m_in_cached": 0,
935 "cost_per_1m_out_cached": 0,
936 "context_window": 400000,
937 "default_max_tokens": 8000,
938 "can_reason": true,
939 "reasoning_levels": [
940 "low",
941 "medium",
942 "high"
943 ],
944 "default_reasoning_effort": "medium",
945 "supports_attachments": true,
946 "options": {}
947 },
948 {
949 "id": "openai/gpt-5-codex",
950 "name": "GPT-5-Codex",
951 "cost_per_1m_in": 1.25,
952 "cost_per_1m_out": 10,
953 "cost_per_1m_in_cached": 0.13,
954 "cost_per_1m_out_cached": 0,
955 "context_window": 400000,
956 "default_max_tokens": 8000,
957 "can_reason": true,
958 "reasoning_levels": [
959 "low",
960 "medium",
961 "high"
962 ],
963 "default_reasoning_effort": "medium",
964 "supports_attachments": true,
965 "options": {}
966 },
967 {
968 "id": "openai/gpt-5.1-instant",
969 "name": "GPT-5.1 Instant",
970 "cost_per_1m_in": 1.25,
971 "cost_per_1m_out": 10,
972 "cost_per_1m_in_cached": 0.13,
973 "cost_per_1m_out_cached": 0,
974 "context_window": 128000,
975 "default_max_tokens": 8000,
976 "can_reason": true,
977 "reasoning_levels": [
978 "low",
979 "medium",
980 "high"
981 ],
982 "default_reasoning_effort": "medium",
983 "supports_attachments": true,
984 "options": {}
985 },
986 {
987 "id": "openai/gpt-5.1-codex",
988 "name": "GPT-5.1-Codex",
989 "cost_per_1m_in": 1.25,
990 "cost_per_1m_out": 10,
991 "cost_per_1m_in_cached": 0.13,
992 "cost_per_1m_out_cached": 0,
993 "context_window": 400000,
994 "default_max_tokens": 8000,
995 "can_reason": true,
996 "reasoning_levels": [
997 "low",
998 "medium",
999 "high"
1000 ],
1001 "default_reasoning_effort": "medium",
1002 "supports_attachments": true,
1003 "options": {}
1004 },
1005 {
1006 "id": "openai/gpt-5.3-chat",
1007 "name": "GPT-5.3 Chat",
1008 "cost_per_1m_in": 1.75,
1009 "cost_per_1m_out": 14,
1010 "cost_per_1m_in_cached": 0.175,
1011 "cost_per_1m_out_cached": 0,
1012 "context_window": 128000,
1013 "default_max_tokens": 8000,
1014 "can_reason": true,
1015 "reasoning_levels": [
1016 "low",
1017 "medium",
1018 "high"
1019 ],
1020 "default_reasoning_effort": "medium",
1021 "supports_attachments": true,
1022 "options": {}
1023 },
1024 {
1025 "id": "google/gemini-2.5-flash",
1026 "name": "Gemini 2.5 Flash",
1027 "cost_per_1m_in": 0.3,
1028 "cost_per_1m_out": 2.5,
1029 "cost_per_1m_in_cached": 0,
1030 "cost_per_1m_out_cached": 0,
1031 "context_window": 1000000,
1032 "default_max_tokens": 8000,
1033 "can_reason": true,
1034 "reasoning_levels": [
1035 "low",
1036 "medium",
1037 "high"
1038 ],
1039 "default_reasoning_effort": "medium",
1040 "supports_attachments": false,
1041 "options": {}
1042 },
1043 {
1044 "id": "google/gemini-2.5-flash-lite",
1045 "name": "Gemini 2.5 Flash Lite",
1046 "cost_per_1m_in": 0.1,
1047 "cost_per_1m_out": 0.4,
1048 "cost_per_1m_in_cached": 0.01,
1049 "cost_per_1m_out_cached": 0,
1050 "context_window": 1048576,
1051 "default_max_tokens": 8000,
1052 "can_reason": true,
1053 "reasoning_levels": [
1054 "low",
1055 "medium",
1056 "high"
1057 ],
1058 "default_reasoning_effort": "medium",
1059 "supports_attachments": true,
1060 "options": {}
1061 },
1062 {
1063 "id": "google/gemini-2.5-flash-lite-preview-09-2025",
1064 "name": "Gemini 2.5 Flash Lite Preview 09-2025",
1065 "cost_per_1m_in": 0.1,
1066 "cost_per_1m_out": 0.4,
1067 "cost_per_1m_in_cached": 0.01,
1068 "cost_per_1m_out_cached": 0,
1069 "context_window": 1048576,
1070 "default_max_tokens": 8000,
1071 "can_reason": true,
1072 "reasoning_levels": [
1073 "low",
1074 "medium",
1075 "high"
1076 ],
1077 "default_reasoning_effort": "medium",
1078 "supports_attachments": true,
1079 "options": {}
1080 },
1081 {
1082 "id": "google/gemini-2.5-flash-preview-09-2025",
1083 "name": "Gemini 2.5 Flash Preview 09-2025",
1084 "cost_per_1m_in": 0.3,
1085 "cost_per_1m_out": 2.5,
1086 "cost_per_1m_in_cached": 0.03,
1087 "cost_per_1m_out_cached": 0,
1088 "context_window": 1000000,
1089 "default_max_tokens": 8000,
1090 "can_reason": true,
1091 "reasoning_levels": [
1092 "low",
1093 "medium",
1094 "high"
1095 ],
1096 "default_reasoning_effort": "medium",
1097 "supports_attachments": true,
1098 "options": {}
1099 },
1100 {
1101 "id": "google/gemini-2.5-pro",
1102 "name": "Gemini 2.5 Pro",
1103 "cost_per_1m_in": 1.25,
1104 "cost_per_1m_out": 10,
1105 "cost_per_1m_in_cached": 0,
1106 "cost_per_1m_out_cached": 0,
1107 "context_window": 1048576,
1108 "default_max_tokens": 8000,
1109 "can_reason": true,
1110 "reasoning_levels": [
1111 "low",
1112 "medium",
1113 "high"
1114 ],
1115 "default_reasoning_effort": "medium",
1116 "supports_attachments": false,
1117 "options": {}
1118 },
1119 {
1120 "id": "google/gemini-3-flash",
1121 "name": "Gemini 3 Flash",
1122 "cost_per_1m_in": 0.5,
1123 "cost_per_1m_out": 3,
1124 "cost_per_1m_in_cached": 0.05,
1125 "cost_per_1m_out_cached": 0,
1126 "context_window": 1000000,
1127 "default_max_tokens": 8000,
1128 "can_reason": true,
1129 "reasoning_levels": [
1130 "low",
1131 "medium",
1132 "high"
1133 ],
1134 "default_reasoning_effort": "medium",
1135 "supports_attachments": true,
1136 "options": {}
1137 },
1138 {
1139 "id": "google/gemini-3-pro-preview",
1140 "name": "Gemini 3 Pro Preview",
1141 "cost_per_1m_in": 2,
1142 "cost_per_1m_out": 12,
1143 "cost_per_1m_in_cached": 0.2,
1144 "cost_per_1m_out_cached": 0,
1145 "context_window": 1000000,
1146 "default_max_tokens": 8000,
1147 "can_reason": true,
1148 "reasoning_levels": [
1149 "low",
1150 "medium",
1151 "high"
1152 ],
1153 "default_reasoning_effort": "medium",
1154 "supports_attachments": true,
1155 "options": {}
1156 },
1157 {
1158 "id": "google/gemini-3.1-flash-lite-preview",
1159 "name": "Gemini 3.1 Flash Lite Preview",
1160 "cost_per_1m_in": 0.25,
1161 "cost_per_1m_out": 1.5,
1162 "cost_per_1m_in_cached": 0,
1163 "cost_per_1m_out_cached": 0,
1164 "context_window": 1000000,
1165 "default_max_tokens": 8000,
1166 "can_reason": true,
1167 "reasoning_levels": [
1168 "low",
1169 "medium",
1170 "high"
1171 ],
1172 "default_reasoning_effort": "medium",
1173 "supports_attachments": true,
1174 "options": {}
1175 },
1176 {
1177 "id": "google/gemini-3.1-pro-preview",
1178 "name": "Gemini 3.1 Pro Preview",
1179 "cost_per_1m_in": 2,
1180 "cost_per_1m_out": 12,
1181 "cost_per_1m_in_cached": 0.2,
1182 "cost_per_1m_out_cached": 0,
1183 "context_window": 1000000,
1184 "default_max_tokens": 8000,
1185 "can_reason": true,
1186 "reasoning_levels": [
1187 "low",
1188 "medium",
1189 "high"
1190 ],
1191 "default_reasoning_effort": "medium",
1192 "supports_attachments": true,
1193 "options": {}
1194 },
1195 {
1196 "id": "xai/grok-2-vision",
1197 "name": "Grok 2 Vision",
1198 "cost_per_1m_in": 2,
1199 "cost_per_1m_out": 10,
1200 "cost_per_1m_in_cached": 0,
1201 "cost_per_1m_out_cached": 0,
1202 "context_window": 32768,
1203 "default_max_tokens": 8000,
1204 "can_reason": false,
1205 "supports_attachments": true,
1206 "options": {}
1207 },
1208 {
1209 "id": "xai/grok-3",
1210 "name": "Grok 3 Beta",
1211 "cost_per_1m_in": 3,
1212 "cost_per_1m_out": 15,
1213 "cost_per_1m_in_cached": 0,
1214 "cost_per_1m_out_cached": 0,
1215 "context_window": 131072,
1216 "default_max_tokens": 8000,
1217 "can_reason": false,
1218 "supports_attachments": false,
1219 "options": {}
1220 },
1221 {
1222 "id": "xai/grok-3-fast",
1223 "name": "Grok 3 Fast Beta",
1224 "cost_per_1m_in": 5,
1225 "cost_per_1m_out": 25,
1226 "cost_per_1m_in_cached": 0,
1227 "cost_per_1m_out_cached": 0,
1228 "context_window": 131072,
1229 "default_max_tokens": 8000,
1230 "can_reason": false,
1231 "supports_attachments": false,
1232 "options": {}
1233 },
1234 {
1235 "id": "xai/grok-3-mini",
1236 "name": "Grok 3 Mini Beta",
1237 "cost_per_1m_in": 0.3,
1238 "cost_per_1m_out": 0.5,
1239 "cost_per_1m_in_cached": 0,
1240 "cost_per_1m_out_cached": 0,
1241 "context_window": 131072,
1242 "default_max_tokens": 8000,
1243 "can_reason": false,
1244 "supports_attachments": false,
1245 "options": {}
1246 },
1247 {
1248 "id": "xai/grok-3-mini-fast",
1249 "name": "Grok 3 Mini Fast Beta",
1250 "cost_per_1m_in": 0.6,
1251 "cost_per_1m_out": 4,
1252 "cost_per_1m_in_cached": 0,
1253 "cost_per_1m_out_cached": 0,
1254 "context_window": 131072,
1255 "default_max_tokens": 8000,
1256 "can_reason": false,
1257 "supports_attachments": false,
1258 "options": {}
1259 },
1260 {
1261 "id": "xai/grok-4",
1262 "name": "Grok 4",
1263 "cost_per_1m_in": 3,
1264 "cost_per_1m_out": 15,
1265 "cost_per_1m_in_cached": 0,
1266 "cost_per_1m_out_cached": 0,
1267 "context_window": 256000,
1268 "default_max_tokens": 8000,
1269 "can_reason": true,
1270 "reasoning_levels": [
1271 "low",
1272 "medium",
1273 "high"
1274 ],
1275 "default_reasoning_effort": "medium",
1276 "supports_attachments": true,
1277 "options": {}
1278 },
1279 {
1280 "id": "xai/grok-4-fast-non-reasoning",
1281 "name": "Grok 4 Fast Non-Reasoning",
1282 "cost_per_1m_in": 0.2,
1283 "cost_per_1m_out": 0.5,
1284 "cost_per_1m_in_cached": 0.05,
1285 "cost_per_1m_out_cached": 0,
1286 "context_window": 2000000,
1287 "default_max_tokens": 8000,
1288 "can_reason": false,
1289 "supports_attachments": false,
1290 "options": {}
1291 },
1292 {
1293 "id": "xai/grok-4-fast-reasoning",
1294 "name": "Grok 4 Fast Reasoning",
1295 "cost_per_1m_in": 0.2,
1296 "cost_per_1m_out": 0.5,
1297 "cost_per_1m_in_cached": 0.05,
1298 "cost_per_1m_out_cached": 0,
1299 "context_window": 2000000,
1300 "default_max_tokens": 8000,
1301 "can_reason": true,
1302 "reasoning_levels": [
1303 "low",
1304 "medium",
1305 "high"
1306 ],
1307 "default_reasoning_effort": "medium",
1308 "supports_attachments": false,
1309 "options": {}
1310 },
1311 {
1312 "id": "xai/grok-4.1-fast-non-reasoning",
1313 "name": "Grok 4.1 Fast Non-Reasoning",
1314 "cost_per_1m_in": 0.2,
1315 "cost_per_1m_out": 0.5,
1316 "cost_per_1m_in_cached": 0.05,
1317 "cost_per_1m_out_cached": 0,
1318 "context_window": 2000000,
1319 "default_max_tokens": 8000,
1320 "can_reason": false,
1321 "supports_attachments": false,
1322 "options": {}
1323 },
1324 {
1325 "id": "xai/grok-4.1-fast-reasoning",
1326 "name": "Grok 4.1 Fast Reasoning",
1327 "cost_per_1m_in": 0.2,
1328 "cost_per_1m_out": 0.5,
1329 "cost_per_1m_in_cached": 0.05,
1330 "cost_per_1m_out_cached": 0,
1331 "context_window": 2000000,
1332 "default_max_tokens": 8000,
1333 "can_reason": true,
1334 "reasoning_levels": [
1335 "low",
1336 "medium",
1337 "high"
1338 ],
1339 "default_reasoning_effort": "medium",
1340 "supports_attachments": false,
1341 "options": {}
1342 },
1343 {
1344 "id": "xai/grok-code-fast-1",
1345 "name": "Grok Code Fast 1",
1346 "cost_per_1m_in": 0.2,
1347 "cost_per_1m_out": 1.5,
1348 "cost_per_1m_in_cached": 0.02,
1349 "cost_per_1m_out_cached": 0,
1350 "context_window": 256000,
1351 "default_max_tokens": 8000,
1352 "can_reason": true,
1353 "reasoning_levels": [
1354 "low",
1355 "medium",
1356 "high"
1357 ],
1358 "default_reasoning_effort": "medium",
1359 "supports_attachments": false,
1360 "options": {}
1361 },
1362 {
1363 "id": "prime-intellect/intellect-3",
1364 "name": "INTELLECT 3",
1365 "cost_per_1m_in": 0.2,
1366 "cost_per_1m_out": 1.1,
1367 "cost_per_1m_in_cached": 0,
1368 "cost_per_1m_out_cached": 0,
1369 "context_window": 131072,
1370 "default_max_tokens": 8000,
1371 "can_reason": true,
1372 "reasoning_levels": [
1373 "low",
1374 "medium",
1375 "high"
1376 ],
1377 "default_reasoning_effort": "medium",
1378 "supports_attachments": false,
1379 "options": {}
1380 },
1381 {
1382 "id": "moonshotai/kimi-k2",
1383 "name": "Kimi K2",
1384 "cost_per_1m_in": 0.5,
1385 "cost_per_1m_out": 2,
1386 "cost_per_1m_in_cached": 0,
1387 "cost_per_1m_out_cached": 0,
1388 "context_window": 131072,
1389 "default_max_tokens": 8000,
1390 "can_reason": false,
1391 "supports_attachments": false,
1392 "options": {}
1393 },
1394 {
1395 "id": "moonshotai/kimi-k2-thinking",
1396 "name": "Kimi K2 Thinking",
1397 "cost_per_1m_in": 0.47,
1398 "cost_per_1m_out": 2,
1399 "cost_per_1m_in_cached": 0.141,
1400 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1402 "default_max_tokens": 8000,
1403 "can_reason": true,
1404 "reasoning_levels": [
1405 "low",
1406 "medium",
1407 "high"
1408 ],
1409 "default_reasoning_effort": "medium",
1410 "supports_attachments": false,
1411 "options": {}
1412 },
1413 {
1414 "id": "moonshotai/kimi-k2-thinking-turbo",
1415 "name": "Kimi K2 Thinking Turbo",
1416 "cost_per_1m_in": 1.15,
1417 "cost_per_1m_out": 8,
1418 "cost_per_1m_in_cached": 0.15,
1419 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1421 "default_max_tokens": 8000,
1422 "can_reason": true,
1423 "reasoning_levels": [
1424 "low",
1425 "medium",
1426 "high"
1427 ],
1428 "default_reasoning_effort": "medium",
1429 "supports_attachments": false,
1430 "options": {}
1431 },
1432 {
1433 "id": "moonshotai/kimi-k2-turbo",
1434 "name": "Kimi K2 Turbo",
1435 "cost_per_1m_in": 2.4,
1436 "cost_per_1m_out": 10,
1437 "cost_per_1m_in_cached": 0,
1438 "cost_per_1m_out_cached": 0,
1439 "context_window": 256000,
1440 "default_max_tokens": 8000,
1441 "can_reason": false,
1442 "supports_attachments": false,
1443 "options": {}
1444 },
1445 {
1446 "id": "moonshotai/kimi-k2.5",
1447 "name": "Kimi K2.5",
1448 "cost_per_1m_in": 0.5,
1449 "cost_per_1m_out": 2.8,
1450 "cost_per_1m_in_cached": 0,
1451 "cost_per_1m_out_cached": 0,
1452 "context_window": 256000,
1453 "default_max_tokens": 8000,
1454 "can_reason": true,
1455 "reasoning_levels": [
1456 "low",
1457 "medium",
1458 "high"
1459 ],
1460 "default_reasoning_effort": "medium",
1461 "supports_attachments": true,
1462 "options": {}
1463 },
1464 {
1465 "id": "meta/llama-3.1-70b",
1466 "name": "Llama 3.1 70B Instruct",
1467 "cost_per_1m_in": 0.4,
1468 "cost_per_1m_out": 0.4,
1469 "cost_per_1m_in_cached": 0,
1470 "cost_per_1m_out_cached": 0,
1471 "context_window": 131072,
1472 "default_max_tokens": 8000,
1473 "can_reason": false,
1474 "supports_attachments": false,
1475 "options": {}
1476 },
1477 {
1478 "id": "meta/llama-3.1-8b",
1479 "name": "Llama 3.1 8B Instruct",
1480 "cost_per_1m_in": 0.03,
1481 "cost_per_1m_out": 0.05,
1482 "cost_per_1m_in_cached": 0,
1483 "cost_per_1m_out_cached": 0,
1484 "context_window": 131072,
1485 "default_max_tokens": 8000,
1486 "can_reason": false,
1487 "supports_attachments": false,
1488 "options": {}
1489 },
1490 {
1491 "id": "meta/llama-3.2-11b",
1492 "name": "Llama 3.2 11B Vision Instruct",
1493 "cost_per_1m_in": 0.16,
1494 "cost_per_1m_out": 0.16,
1495 "cost_per_1m_in_cached": 0,
1496 "cost_per_1m_out_cached": 0,
1497 "context_window": 128000,
1498 "default_max_tokens": 8000,
1499 "can_reason": false,
1500 "supports_attachments": true,
1501 "options": {}
1502 },
1503 {
1504 "id": "meta/llama-3.2-90b",
1505 "name": "Llama 3.2 90B Vision Instruct",
1506 "cost_per_1m_in": 0.72,
1507 "cost_per_1m_out": 0.72,
1508 "cost_per_1m_in_cached": 0,
1509 "cost_per_1m_out_cached": 0,
1510 "context_window": 128000,
1511 "default_max_tokens": 8000,
1512 "can_reason": false,
1513 "supports_attachments": true,
1514 "options": {}
1515 },
1516 {
1517 "id": "meta/llama-3.3-70b",
1518 "name": "Llama 3.3 70B Instruct",
1519 "cost_per_1m_in": 0.72,
1520 "cost_per_1m_out": 0.72,
1521 "cost_per_1m_in_cached": 0,
1522 "cost_per_1m_out_cached": 0,
1523 "context_window": 128000,
1524 "default_max_tokens": 8000,
1525 "can_reason": false,
1526 "supports_attachments": false,
1527 "options": {}
1528 },
1529 {
1530 "id": "meta/llama-4-maverick",
1531 "name": "Llama 4 Maverick 17B Instruct",
1532 "cost_per_1m_in": 0.15,
1533 "cost_per_1m_out": 0.6,
1534 "cost_per_1m_in_cached": 0,
1535 "cost_per_1m_out_cached": 0,
1536 "context_window": 131072,
1537 "default_max_tokens": 8000,
1538 "can_reason": false,
1539 "supports_attachments": true,
1540 "options": {}
1541 },
1542 {
1543 "id": "meta/llama-4-scout",
1544 "name": "Llama 4 Scout 17B Instruct",
1545 "cost_per_1m_in": 0.08,
1546 "cost_per_1m_out": 0.3,
1547 "cost_per_1m_in_cached": 0,
1548 "cost_per_1m_out_cached": 0,
1549 "context_window": 131072,
1550 "default_max_tokens": 8000,
1551 "can_reason": false,
1552 "supports_attachments": true,
1553 "options": {}
1554 },
1555 {
1556 "id": "meituan/longcat-flash-chat",
1557 "name": "LongCat Flash Chat",
1558 "cost_per_1m_in": 0,
1559 "cost_per_1m_out": 0,
1560 "cost_per_1m_in_cached": 0,
1561 "cost_per_1m_out_cached": 0,
1562 "context_window": 128000,
1563 "default_max_tokens": 8000,
1564 "can_reason": false,
1565 "supports_attachments": false,
1566 "options": {}
1567 },
1568 {
1569 "id": "meituan/longcat-flash-thinking",
1570 "name": "LongCat Flash Thinking",
1571 "cost_per_1m_in": 0.15,
1572 "cost_per_1m_out": 1.5,
1573 "cost_per_1m_in_cached": 0,
1574 "cost_per_1m_out_cached": 0,
1575 "context_window": 128000,
1576 "default_max_tokens": 8000,
1577 "can_reason": true,
1578 "reasoning_levels": [
1579 "low",
1580 "medium",
1581 "high"
1582 ],
1583 "default_reasoning_effort": "medium",
1584 "supports_attachments": false,
1585 "options": {}
1586 },
1587 {
1588 "id": "inception/mercury-2",
1589 "name": "Mercury 2",
1590 "cost_per_1m_in": 0.25,
1591 "cost_per_1m_out": 0.75,
1592 "cost_per_1m_in_cached": 0.025,
1593 "cost_per_1m_out_cached": 0,
1594 "context_window": 128000,
1595 "default_max_tokens": 8000,
1596 "can_reason": true,
1597 "reasoning_levels": [
1598 "low",
1599 "medium",
1600 "high"
1601 ],
1602 "default_reasoning_effort": "medium",
1603 "supports_attachments": false,
1604 "options": {}
1605 },
1606 {
1607 "id": "inception/mercury-coder-small",
1608 "name": "Mercury Coder Small Beta",
1609 "cost_per_1m_in": 0.25,
1610 "cost_per_1m_out": 1,
1611 "cost_per_1m_in_cached": 0,
1612 "cost_per_1m_out_cached": 0,
1613 "context_window": 32000,
1614 "default_max_tokens": 8000,
1615 "can_reason": false,
1616 "supports_attachments": false,
1617 "options": {}
1618 },
1619 {
1620 "id": "xiaomi/mimo-v2-flash",
1621 "name": "MiMo V2 Flash",
1622 "cost_per_1m_in": 0.09,
1623 "cost_per_1m_out": 0.29,
1624 "cost_per_1m_in_cached": 0,
1625 "cost_per_1m_out_cached": 0,
1626 "context_window": 262144,
1627 "default_max_tokens": 8000,
1628 "can_reason": true,
1629 "reasoning_levels": [
1630 "low",
1631 "medium",
1632 "high"
1633 ],
1634 "default_reasoning_effort": "medium",
1635 "supports_attachments": false,
1636 "options": {}
1637 },
1638 {
1639 "id": "minimax/minimax-m2",
1640 "name": "MiniMax M2",
1641 "cost_per_1m_in": 0.3,
1642 "cost_per_1m_out": 1.2,
1643 "cost_per_1m_in_cached": 0.03,
1644 "cost_per_1m_out_cached": 0.375,
1645 "context_window": 205000,
1646 "default_max_tokens": 8000,
1647 "can_reason": true,
1648 "reasoning_levels": [
1649 "low",
1650 "medium",
1651 "high"
1652 ],
1653 "default_reasoning_effort": "medium",
1654 "supports_attachments": false,
1655 "options": {}
1656 },
1657 {
1658 "id": "minimax/minimax-m2.1",
1659 "name": "MiniMax M2.1",
1660 "cost_per_1m_in": 0.3,
1661 "cost_per_1m_out": 1.2,
1662 "cost_per_1m_in_cached": 0.15,
1663 "cost_per_1m_out_cached": 0,
1664 "context_window": 204800,
1665 "default_max_tokens": 8000,
1666 "can_reason": true,
1667 "reasoning_levels": [
1668 "low",
1669 "medium",
1670 "high"
1671 ],
1672 "default_reasoning_effort": "medium",
1673 "supports_attachments": false,
1674 "options": {}
1675 },
1676 {
1677 "id": "minimax/minimax-m2.1-lightning",
1678 "name": "MiniMax M2.1 Lightning",
1679 "cost_per_1m_in": 0.3,
1680 "cost_per_1m_out": 2.4,
1681 "cost_per_1m_in_cached": 0.03,
1682 "cost_per_1m_out_cached": 0.375,
1683 "context_window": 204800,
1684 "default_max_tokens": 8000,
1685 "can_reason": true,
1686 "reasoning_levels": [
1687 "low",
1688 "medium",
1689 "high"
1690 ],
1691 "default_reasoning_effort": "medium",
1692 "supports_attachments": false,
1693 "options": {}
1694 },
1695 {
1696 "id": "minimax/minimax-m2.5",
1697 "name": "MiniMax M2.5",
1698 "cost_per_1m_in": 0.3,
1699 "cost_per_1m_out": 1.2,
1700 "cost_per_1m_in_cached": 0.03,
1701 "cost_per_1m_out_cached": 0.375,
1702 "context_window": 204800,
1703 "default_max_tokens": 8000,
1704 "can_reason": true,
1705 "reasoning_levels": [
1706 "low",
1707 "medium",
1708 "high"
1709 ],
1710 "default_reasoning_effort": "medium",
1711 "supports_attachments": false,
1712 "options": {}
1713 },
1714 {
1715 "id": "minimax/minimax-m2.5-highspeed",
1716 "name": "MiniMax M2.5 High Speed",
1717 "cost_per_1m_in": 0.6,
1718 "cost_per_1m_out": 2.4,
1719 "cost_per_1m_in_cached": 0.03,
1720 "cost_per_1m_out_cached": 0.375,
      "context_window": 204800,
      "default_max_tokens": 8000,
1723 "can_reason": true,
1724 "reasoning_levels": [
1725 "low",
1726 "medium",
1727 "high"
1728 ],
1729 "default_reasoning_effort": "medium",
1730 "supports_attachments": false,
1731 "options": {}
1732 },
1733 {
1734 "id": "mistral/ministral-3b",
1735 "name": "Ministral 3B",
1736 "cost_per_1m_in": 0.04,
1737 "cost_per_1m_out": 0.04,
1738 "cost_per_1m_in_cached": 0,
1739 "cost_per_1m_out_cached": 0,
1740 "context_window": 128000,
1741 "default_max_tokens": 4000,
1742 "can_reason": false,
1743 "supports_attachments": false,
1744 "options": {}
1745 },
1746 {
1747 "id": "mistral/ministral-8b",
1748 "name": "Ministral 8B",
1749 "cost_per_1m_in": 0.1,
1750 "cost_per_1m_out": 0.1,
1751 "cost_per_1m_in_cached": 0,
1752 "cost_per_1m_out_cached": 0,
1753 "context_window": 128000,
1754 "default_max_tokens": 4000,
1755 "can_reason": false,
1756 "supports_attachments": false,
1757 "options": {}
1758 },
1759 {
1760 "id": "mistral/codestral",
1761 "name": "Mistral Codestral",
1762 "cost_per_1m_in": 0.3,
1763 "cost_per_1m_out": 0.9,
1764 "cost_per_1m_in_cached": 0,
1765 "cost_per_1m_out_cached": 0,
1766 "context_window": 128000,
1767 "default_max_tokens": 4000,
1768 "can_reason": false,
1769 "supports_attachments": false,
1770 "options": {}
1771 },
1772 {
1773 "id": "mistral/mistral-medium",
1774 "name": "Mistral Medium 3.1",
1775 "cost_per_1m_in": 0.4,
1776 "cost_per_1m_out": 2,
1777 "cost_per_1m_in_cached": 0,
1778 "cost_per_1m_out_cached": 0,
1779 "context_window": 128000,
1780 "default_max_tokens": 8000,
1781 "can_reason": false,
1782 "supports_attachments": true,
1783 "options": {}
1784 },
1785 {
1786 "id": "mistral/mistral-small",
1787 "name": "Mistral Small",
1788 "cost_per_1m_in": 0.1,
1789 "cost_per_1m_out": 0.3,
1790 "cost_per_1m_in_cached": 0,
1791 "cost_per_1m_out_cached": 0,
1792 "context_window": 32000,
1793 "default_max_tokens": 4000,
1794 "can_reason": false,
1795 "supports_attachments": true,
1796 "options": {}
1797 },
1798 {
1799 "id": "nvidia/nemotron-nano-12b-v2-vl",
1800 "name": "Nvidia Nemotron Nano 12B V2 VL",
1801 "cost_per_1m_in": 0.2,
1802 "cost_per_1m_out": 0.6,
1803 "cost_per_1m_in_cached": 0,
1804 "cost_per_1m_out_cached": 0,
1805 "context_window": 131072,
1806 "default_max_tokens": 8000,
1807 "can_reason": true,
1808 "reasoning_levels": [
1809 "low",
1810 "medium",
1811 "high"
1812 ],
1813 "default_reasoning_effort": "medium",
1814 "supports_attachments": true,
1815 "options": {}
1816 },
1817 {
1818 "id": "nvidia/nemotron-nano-9b-v2",
1819 "name": "Nvidia Nemotron Nano 9B V2",
1820 "cost_per_1m_in": 0.04,
1821 "cost_per_1m_out": 0.16,
1822 "cost_per_1m_in_cached": 0,
1823 "cost_per_1m_out_cached": 0,
1824 "context_window": 131072,
1825 "default_max_tokens": 8000,
1826 "can_reason": true,
1827 "reasoning_levels": [
1828 "low",
1829 "medium",
1830 "high"
1831 ],
1832 "default_reasoning_effort": "medium",
1833 "supports_attachments": false,
1834 "options": {}
1835 },
1836 {
1837 "id": "mistral/pixtral-12b",
1838 "name": "Pixtral 12B 2409",
1839 "cost_per_1m_in": 0.15,
1840 "cost_per_1m_out": 0.15,
1841 "cost_per_1m_in_cached": 0,
1842 "cost_per_1m_out_cached": 0,
1843 "context_window": 128000,
1844 "default_max_tokens": 4000,
1845 "can_reason": false,
1846 "supports_attachments": true,
1847 "options": {}
1848 },
1849 {
1850 "id": "mistral/pixtral-large",
1851 "name": "Pixtral Large",
1852 "cost_per_1m_in": 2,
1853 "cost_per_1m_out": 6,
1854 "cost_per_1m_in_cached": 0,
1855 "cost_per_1m_out_cached": 0,
1856 "context_window": 128000,
1857 "default_max_tokens": 4000,
1858 "can_reason": false,
1859 "supports_attachments": true,
1860 "options": {}
1861 },
1862 {
1863 "id": "alibaba/qwen-3-32b",
1864 "name": "Qwen 3 32B",
1865 "cost_per_1m_in": 0.1,
1866 "cost_per_1m_out": 0.3,
1867 "cost_per_1m_in_cached": 0,
1868 "cost_per_1m_out_cached": 0,
1869 "context_window": 40960,
1870 "default_max_tokens": 8000,
1871 "can_reason": true,
1872 "reasoning_levels": [
1873 "low",
1874 "medium",
1875 "high"
1876 ],
1877 "default_reasoning_effort": "medium",
1878 "supports_attachments": false,
1879 "options": {}
1880 },
1881 {
1882 "id": "alibaba/qwen3-coder-30b-a3b",
1883 "name": "Qwen 3 Coder 30B A3B Instruct",
1884 "cost_per_1m_in": 0.07,
1885 "cost_per_1m_out": 0.27,
1886 "cost_per_1m_in_cached": 0,
1887 "cost_per_1m_out_cached": 0,
1888 "context_window": 160000,
1889 "default_max_tokens": 8000,
1890 "can_reason": true,
1891 "reasoning_levels": [
1892 "low",
1893 "medium",
1894 "high"
1895 ],
1896 "default_reasoning_effort": "medium",
1897 "supports_attachments": false,
1898 "options": {}
1899 },
1900 {
1901 "id": "alibaba/qwen3-max-thinking",
1902 "name": "Qwen 3 Max Thinking",
1903 "cost_per_1m_in": 1.2,
1904 "cost_per_1m_out": 6,
1905 "cost_per_1m_in_cached": 0.24,
1906 "cost_per_1m_out_cached": 0,
1907 "context_window": 256000,
1908 "default_max_tokens": 8000,
1909 "can_reason": true,
1910 "reasoning_levels": [
1911 "low",
1912 "medium",
1913 "high"
1914 ],
1915 "default_reasoning_effort": "medium",
1916 "supports_attachments": false,
1917 "options": {}
1918 },
1919 {
1920 "id": "alibaba/qwen3.5-flash",
1921 "name": "Qwen 3.5 Flash",
1922 "cost_per_1m_in": 0.1,
1923 "cost_per_1m_out": 0.4,
1924 "cost_per_1m_in_cached": 0.001,
1925 "cost_per_1m_out_cached": 0.125,
1926 "context_window": 1000000,
1927 "default_max_tokens": 8000,
1928 "can_reason": true,
1929 "reasoning_levels": [
1930 "low",
1931 "medium",
1932 "high"
1933 ],
1934 "default_reasoning_effort": "medium",
1935 "supports_attachments": true,
1936 "options": {}
1937 },
1938 {
1939 "id": "alibaba/qwen3.5-plus",
1940 "name": "Qwen 3.5 Plus",
1941 "cost_per_1m_in": 0.4,
1942 "cost_per_1m_out": 2.4,
1943 "cost_per_1m_in_cached": 0.04,
1944 "cost_per_1m_out_cached": 0.5,
1945 "context_window": 1000000,
1946 "default_max_tokens": 8000,
1947 "can_reason": true,
1948 "reasoning_levels": [
1949 "low",
1950 "medium",
1951 "high"
1952 ],
1953 "default_reasoning_effort": "medium",
1954 "supports_attachments": true,
1955 "options": {}
1956 },
1957 {
1958 "id": "alibaba/qwen3-235b-a22b-thinking",
1959 "name": "Qwen3 235B A22B Thinking 2507",
1960 "cost_per_1m_in": 0.3,
1961 "cost_per_1m_out": 2.9,
1962 "cost_per_1m_in_cached": 0,
1963 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1965 "default_max_tokens": 8000,
1966 "can_reason": true,
1967 "reasoning_levels": [
1968 "low",
1969 "medium",
1970 "high"
1971 ],
1972 "default_reasoning_effort": "medium",
1973 "supports_attachments": true,
1974 "options": {}
1975 },
1976 {
1977 "id": "alibaba/qwen3-coder",
1978 "name": "Qwen3 Coder 480B A35B Instruct",
1979 "cost_per_1m_in": 0.4,
1980 "cost_per_1m_out": 1.6,
1981 "cost_per_1m_in_cached": 0,
1982 "cost_per_1m_out_cached": 0,
1983 "context_window": 262144,
1984 "default_max_tokens": 8000,
1985 "can_reason": false,
1986 "supports_attachments": false,
1987 "options": {}
1988 },
1989 {
1990 "id": "alibaba/qwen3-coder-next",
1991 "name": "Qwen3 Coder Next",
1992 "cost_per_1m_in": 0.5,
1993 "cost_per_1m_out": 1.2,
1994 "cost_per_1m_in_cached": 0,
1995 "cost_per_1m_out_cached": 0,
1996 "context_window": 256000,
1997 "default_max_tokens": 8000,
1998 "can_reason": false,
1999 "supports_attachments": false,
2000 "options": {}
2001 },
2002 {
2003 "id": "alibaba/qwen3-coder-plus",
2004 "name": "Qwen3 Coder Plus",
2005 "cost_per_1m_in": 1,
2006 "cost_per_1m_out": 5,
2007 "cost_per_1m_in_cached": 0.2,
2008 "cost_per_1m_out_cached": 0,
2009 "context_window": 1000000,
2010 "default_max_tokens": 8000,
2011 "can_reason": false,
2012 "supports_attachments": false,
2013 "options": {}
2014 },
2015 {
2016 "id": "alibaba/qwen3-max-preview",
2017 "name": "Qwen3 Max Preview",
2018 "cost_per_1m_in": 1.2,
2019 "cost_per_1m_out": 6,
2020 "cost_per_1m_in_cached": 0.24,
2021 "cost_per_1m_out_cached": 0,
2022 "context_window": 262144,
2023 "default_max_tokens": 8000,
2024 "can_reason": false,
2025 "supports_attachments": false,
2026 "options": {}
2027 },
2028 {
2029 "id": "alibaba/qwen3-vl-thinking",
2030 "name": "Qwen3 VL 235B A22B Thinking",
2031 "cost_per_1m_in": 0.22,
2032 "cost_per_1m_out": 0.88,
2033 "cost_per_1m_in_cached": 0,
2034 "cost_per_1m_out_cached": 0,
2035 "context_window": 256000,
2036 "default_max_tokens": 8000,
2037 "can_reason": true,
2038 "reasoning_levels": [
2039 "low",
2040 "medium",
2041 "high"
2042 ],
2043 "default_reasoning_effort": "medium",
2044 "supports_attachments": true,
2045 "options": {}
2046 },
2047 {
2048 "id": "alibaba/qwen-3-14b",
2049 "name": "Qwen3-14B",
2050 "cost_per_1m_in": 0.06,
2051 "cost_per_1m_out": 0.24,
2052 "cost_per_1m_in_cached": 0,
2053 "cost_per_1m_out_cached": 0,
2054 "context_window": 40960,
2055 "default_max_tokens": 8000,
2056 "can_reason": true,
2057 "reasoning_levels": [
2058 "low",
2059 "medium",
2060 "high"
2061 ],
2062 "default_reasoning_effort": "medium",
2063 "supports_attachments": false,
2064 "options": {}
2065 },
2066 {
2067 "id": "alibaba/qwen-3-235b",
2068 "name": "Qwen3-235B-A22B",
2069 "cost_per_1m_in": 0.071,
2070 "cost_per_1m_out": 0.463,
2071 "cost_per_1m_in_cached": 0,
2072 "cost_per_1m_out_cached": 0,
2073 "context_window": 40960,
2074 "default_max_tokens": 8000,
2075 "can_reason": false,
2076 "supports_attachments": false,
2077 "options": {}
2078 },
2079 {
2080 "id": "alibaba/qwen-3-30b",
2081 "name": "Qwen3-30B-A3B",
2082 "cost_per_1m_in": 0.08,
2083 "cost_per_1m_out": 0.29,
2084 "cost_per_1m_in_cached": 0,
2085 "cost_per_1m_out_cached": 0,
2086 "context_window": 40960,
2087 "default_max_tokens": 8000,
2088 "can_reason": true,
2089 "reasoning_levels": [
2090 "low",
2091 "medium",
2092 "high"
2093 ],
2094 "default_reasoning_effort": "medium",
2095 "supports_attachments": false,
2096 "options": {}
2097 },
2098 {
2099 "id": "bytedance/seed-1.6",
2100 "name": "Seed 1.6",
2101 "cost_per_1m_in": 0.25,
2102 "cost_per_1m_out": 2,
2103 "cost_per_1m_in_cached": 0.05,
2104 "cost_per_1m_out_cached": 0,
2105 "context_window": 256000,
2106 "default_max_tokens": 8000,
2107 "can_reason": true,
2108 "reasoning_levels": [
2109 "low",
2110 "medium",
2111 "high"
2112 ],
2113 "default_reasoning_effort": "medium",
2114 "supports_attachments": false,
2115 "options": {}
2116 },
2117 {
2118 "id": "perplexity/sonar",
2119 "name": "Sonar",
2120 "cost_per_1m_in": 1,
2121 "cost_per_1m_out": 1,
2122 "cost_per_1m_in_cached": 0,
2123 "cost_per_1m_out_cached": 0,
2124 "context_window": 127000,
2125 "default_max_tokens": 8000,
2126 "can_reason": false,
2127 "supports_attachments": true,
2128 "options": {}
2129 },
2130 {
2131 "id": "perplexity/sonar-pro",
2132 "name": "Sonar Pro",
2133 "cost_per_1m_in": 3,
2134 "cost_per_1m_out": 15,
2135 "cost_per_1m_in_cached": 0,
2136 "cost_per_1m_out_cached": 0,
2137 "context_window": 200000,
2138 "default_max_tokens": 8000,
2139 "can_reason": false,
2140 "supports_attachments": true,
2141 "options": {}
2142 },
2143 {
2144 "id": "arcee-ai/trinity-large-preview",
2145 "name": "Trinity Large Preview",
2146 "cost_per_1m_in": 0.25,
2147 "cost_per_1m_out": 1,
2148 "cost_per_1m_in_cached": 0,
2149 "cost_per_1m_out_cached": 0,
2150 "context_window": 131000,
2151 "default_max_tokens": 8000,
2152 "can_reason": false,
2153 "supports_attachments": false,
2154 "options": {}
2155 },
2156 {
2157 "id": "openai/gpt-oss-120b",
2158 "name": "gpt-oss-120b",
2159 "cost_per_1m_in": 0.1,
2160 "cost_per_1m_out": 0.5,
2161 "cost_per_1m_in_cached": 0,
2162 "cost_per_1m_out_cached": 0,
2163 "context_window": 131072,
2164 "default_max_tokens": 8000,
2165 "can_reason": true,
2166 "reasoning_levels": [
2167 "low",
2168 "medium",
2169 "high"
2170 ],
2171 "default_reasoning_effort": "medium",
2172 "supports_attachments": false,
2173 "options": {}
2174 },
2175 {
2176 "id": "openai/gpt-oss-20b",
2177 "name": "gpt-oss-20b",
2178 "cost_per_1m_in": 0.07,
2179 "cost_per_1m_out": 0.3,
2180 "cost_per_1m_in_cached": 0,
2181 "cost_per_1m_out_cached": 0,
2182 "context_window": 128000,
2183 "default_max_tokens": 8000,
2184 "can_reason": true,
2185 "reasoning_levels": [
2186 "low",
2187 "medium",
2188 "high"
2189 ],
2190 "default_reasoning_effort": "medium",
2191 "supports_attachments": false,
2192 "options": {}
2193 },
2194 {
2195 "id": "openai/gpt-oss-safeguard-20b",
2196 "name": "gpt-oss-safeguard-20b",
2197 "cost_per_1m_in": 0.075,
2198 "cost_per_1m_out": 0.3,
2199 "cost_per_1m_in_cached": 0.037,
2200 "cost_per_1m_out_cached": 0,
2201 "context_window": 131072,
2202 "default_max_tokens": 8000,
2203 "can_reason": true,
2204 "reasoning_levels": [
2205 "low",
2206 "medium",
2207 "high"
2208 ],
2209 "default_reasoning_effort": "medium",
2210 "supports_attachments": false,
2211 "options": {}
2212 },
2213 {
2214 "id": "openai/o1",
2215 "name": "o1",
2216 "cost_per_1m_in": 15,
2217 "cost_per_1m_out": 60,
2218 "cost_per_1m_in_cached": 7.5,
2219 "cost_per_1m_out_cached": 0,
2220 "context_window": 200000,
2221 "default_max_tokens": 8000,
2222 "can_reason": true,
2223 "reasoning_levels": [
2224 "low",
2225 "medium",
2226 "high"
2227 ],
2228 "default_reasoning_effort": "medium",
2229 "supports_attachments": true,
2230 "options": {}
2231 },
2232 {
2233 "id": "openai/o3",
2234 "name": "o3",
2235 "cost_per_1m_in": 2,
2236 "cost_per_1m_out": 8,
2237 "cost_per_1m_in_cached": 0.5,
2238 "cost_per_1m_out_cached": 0,
2239 "context_window": 200000,
2240 "default_max_tokens": 8000,
2241 "can_reason": true,
2242 "reasoning_levels": [
2243 "low",
2244 "medium",
2245 "high"
2246 ],
2247 "default_reasoning_effort": "medium",
2248 "supports_attachments": true,
2249 "options": {}
2250 },
2251 {
2252 "id": "openai/o3-pro",
2253 "name": "o3 Pro",
2254 "cost_per_1m_in": 20,
2255 "cost_per_1m_out": 80,
2256 "cost_per_1m_in_cached": 0,
2257 "cost_per_1m_out_cached": 0,
2258 "context_window": 200000,
2259 "default_max_tokens": 8000,
2260 "can_reason": true,
2261 "reasoning_levels": [
2262 "low",
2263 "medium",
2264 "high"
2265 ],
2266 "default_reasoning_effort": "medium",
2267 "supports_attachments": true,
2268 "options": {}
2269 },
2270 {
2271 "id": "openai/o3-deep-research",
2272 "name": "o3-deep-research",
2273 "cost_per_1m_in": 10,
2274 "cost_per_1m_out": 40,
2275 "cost_per_1m_in_cached": 2.5,
2276 "cost_per_1m_out_cached": 0,
2277 "context_window": 200000,
2278 "default_max_tokens": 8000,
2279 "can_reason": true,
2280 "reasoning_levels": [
2281 "low",
2282 "medium",
2283 "high"
2284 ],
2285 "default_reasoning_effort": "medium",
2286 "supports_attachments": true,
2287 "options": {}
2288 },
2289 {
2290 "id": "openai/o3-mini",
2291 "name": "o3-mini",
2292 "cost_per_1m_in": 1.1,
2293 "cost_per_1m_out": 4.4,
2294 "cost_per_1m_in_cached": 0.55,
2295 "cost_per_1m_out_cached": 0,
2296 "context_window": 200000,
2297 "default_max_tokens": 8000,
2298 "can_reason": true,
2299 "reasoning_levels": [
2300 "low",
2301 "medium",
2302 "high"
2303 ],
2304 "default_reasoning_effort": "medium",
2305 "supports_attachments": false,
2306 "options": {}
2307 },
2308 {
2309 "id": "openai/o4-mini",
2310 "name": "o4-mini",
2311 "cost_per_1m_in": 1.1,
2312 "cost_per_1m_out": 4.4,
2313 "cost_per_1m_in_cached": 0.275,
2314 "cost_per_1m_out_cached": 0,
2315 "context_window": 200000,
2316 "default_max_tokens": 8000,
2317 "can_reason": true,
2318 "reasoning_levels": [
2319 "low",
2320 "medium",
2321 "high"
2322 ],
2323 "default_reasoning_effort": "medium",
2324 "supports_attachments": true,
2325 "options": {}
2326 },
2327 {
2328 "id": "vercel/v0-1.0-md",
2329 "name": "v0-1.0-md",
2330 "cost_per_1m_in": 3,
2331 "cost_per_1m_out": 15,
2332 "cost_per_1m_in_cached": 0,
2333 "cost_per_1m_out_cached": 0,
2334 "context_window": 128000,
2335 "default_max_tokens": 8000,
2336 "can_reason": false,
2337 "supports_attachments": true,
2338 "options": {}
2339 },
2340 {
2341 "id": "vercel/v0-1.5-md",
2342 "name": "v0-1.5-md",
2343 "cost_per_1m_in": 3,
2344 "cost_per_1m_out": 15,
2345 "cost_per_1m_in_cached": 0,
2346 "cost_per_1m_out_cached": 0,
2347 "context_window": 128000,
2348 "default_max_tokens": 8000,
2349 "can_reason": false,
2350 "supports_attachments": true,
2351 "options": {}
2352 }
2353 ],
2354 "default_headers": {
2355 "HTTP-Referer": "https://charm.land",
2356 "X-Title": "Crush"
2357 }
2358}