1{
2 "name": "Vercel",
3 "id": "vercel",
4 "api_key": "$VERCEL_API_KEY",
5 "api_endpoint": "https://ai-gateway.vercel.sh/v1",
6 "type": "vercel",
7 "default_large_model_id": "anthropic/claude-sonnet-4",
8 "default_small_model_id": "anthropic/claude-haiku-4.5",
9 "models": [
10 {
11 "id": "anthropic/claude-3-haiku",
12 "name": "Claude 3 Haiku",
13 "cost_per_1m_in": 0.25,
14 "cost_per_1m_out": 1.25,
15 "cost_per_1m_in_cached": 0.03,
16 "cost_per_1m_out_cached": 0.3,
17 "context_window": 200000,
18 "default_max_tokens": 4096,
19 "can_reason": false,
20 "supports_attachments": true,
21 "options": {}
22 },
23 {
24 "id": "anthropic/claude-3.5-haiku",
25 "name": "Claude 3.5 Haiku",
26 "cost_per_1m_in": 0.8,
27 "cost_per_1m_out": 4,
28 "cost_per_1m_in_cached": 0.08,
29 "cost_per_1m_out_cached": 1,
30 "context_window": 200000,
31 "default_max_tokens": 8000,
32 "can_reason": false,
33 "supports_attachments": true,
34 "options": {}
35 },
36 {
37 "id": "anthropic/claude-3.5-sonnet",
38 "name": "Claude 3.5 Sonnet",
39 "cost_per_1m_in": 3,
40 "cost_per_1m_out": 15,
41 "cost_per_1m_in_cached": 0.3,
42 "cost_per_1m_out_cached": 3.75,
43 "context_window": 200000,
44 "default_max_tokens": 8000,
45 "can_reason": false,
46 "supports_attachments": true,
47 "options": {}
48 },
49 {
50 "id": "anthropic/claude-3.5-sonnet-20240620",
51 "name": "Claude 3.5 Sonnet (2024-06-20)",
52 "cost_per_1m_in": 3,
53 "cost_per_1m_out": 15,
54 "cost_per_1m_in_cached": 0.3,
55 "cost_per_1m_out_cached": 3.75,
56 "context_window": 200000,
57 "default_max_tokens": 8000,
58 "can_reason": false,
59 "supports_attachments": true,
60 "options": {}
61 },
62 {
63 "id": "anthropic/claude-3.7-sonnet",
64 "name": "Claude 3.7 Sonnet",
65 "cost_per_1m_in": 3,
66 "cost_per_1m_out": 15,
67 "cost_per_1m_in_cached": 0.3,
68 "cost_per_1m_out_cached": 3.75,
69 "context_window": 200000,
70 "default_max_tokens": 8000,
71 "can_reason": true,
72 "reasoning_levels": [
73 "none",
74 "minimal",
75 "low",
76 "medium",
77 "high",
78 "xhigh"
79 ],
80 "default_reasoning_effort": "medium",
81 "supports_attachments": true,
82 "options": {}
83 },
84 {
85 "id": "anthropic/claude-haiku-4.5",
86 "name": "Claude Haiku 4.5",
87 "cost_per_1m_in": 1,
88 "cost_per_1m_out": 5,
89 "cost_per_1m_in_cached": 0.1,
90 "cost_per_1m_out_cached": 1.25,
91 "context_window": 200000,
92 "default_max_tokens": 8000,
93 "can_reason": true,
94 "reasoning_levels": [
95 "none",
96 "minimal",
97 "low",
98 "medium",
99 "high",
100 "xhigh"
101 ],
102 "default_reasoning_effort": "medium",
103 "supports_attachments": true,
104 "options": {}
105 },
106 {
107 "id": "anthropic/claude-opus-4",
108 "name": "Claude Opus 4",
109 "cost_per_1m_in": 15,
110 "cost_per_1m_out": 75,
111 "cost_per_1m_in_cached": 1.5,
112 "cost_per_1m_out_cached": 18.75,
113 "context_window": 200000,
114 "default_max_tokens": 8000,
115 "can_reason": true,
116 "reasoning_levels": [
117 "none",
118 "minimal",
119 "low",
120 "medium",
121 "high",
122 "xhigh"
123 ],
124 "default_reasoning_effort": "medium",
125 "supports_attachments": true,
126 "options": {}
127 },
128 {
129 "id": "anthropic/claude-opus-4.1",
130 "name": "Claude Opus 4.1",
131 "cost_per_1m_in": 15,
132 "cost_per_1m_out": 75,
133 "cost_per_1m_in_cached": 1.5,
134 "cost_per_1m_out_cached": 18.75,
135 "context_window": 200000,
136 "default_max_tokens": 8000,
137 "can_reason": true,
138 "reasoning_levels": [
139 "none",
140 "minimal",
141 "low",
142 "medium",
143 "high",
144 "xhigh"
145 ],
146 "default_reasoning_effort": "medium",
147 "supports_attachments": true,
148 "options": {}
149 },
150 {
151 "id": "anthropic/claude-opus-4.5",
152 "name": "Claude Opus 4.5",
153 "cost_per_1m_in": 5,
154 "cost_per_1m_out": 25,
155 "cost_per_1m_in_cached": 0.5,
156 "cost_per_1m_out_cached": 6.25,
157 "context_window": 200000,
158 "default_max_tokens": 8000,
159 "can_reason": true,
160 "reasoning_levels": [
161 "none",
162 "minimal",
163 "low",
164 "medium",
165 "high",
166 "xhigh"
167 ],
168 "default_reasoning_effort": "medium",
169 "supports_attachments": true,
170 "options": {}
171 },
172 {
173 "id": "anthropic/claude-opus-4.6",
174 "name": "Claude Opus 4.6",
175 "cost_per_1m_in": 5,
176 "cost_per_1m_out": 25,
177 "cost_per_1m_in_cached": 0.5,
178 "cost_per_1m_out_cached": 6.25,
179 "context_window": 1000000,
180 "default_max_tokens": 8000,
181 "can_reason": true,
182 "reasoning_levels": [
183 "none",
184 "minimal",
185 "low",
186 "medium",
187 "high",
188 "xhigh"
189 ],
190 "default_reasoning_effort": "medium",
191 "supports_attachments": true,
192 "options": {}
193 },
194 {
195 "id": "anthropic/claude-sonnet-4",
196 "name": "Claude Sonnet 4",
197 "cost_per_1m_in": 3,
198 "cost_per_1m_out": 15,
199 "cost_per_1m_in_cached": 0.3,
200 "cost_per_1m_out_cached": 3.75,
201 "context_window": 1000000,
202 "default_max_tokens": 8000,
203 "can_reason": true,
204 "reasoning_levels": [
205 "none",
206 "minimal",
207 "low",
208 "medium",
209 "high",
210 "xhigh"
211 ],
212 "default_reasoning_effort": "medium",
213 "supports_attachments": true,
214 "options": {}
215 },
216 {
217 "id": "anthropic/claude-sonnet-4.5",
218 "name": "Claude Sonnet 4.5",
219 "cost_per_1m_in": 3,
220 "cost_per_1m_out": 15,
221 "cost_per_1m_in_cached": 0.3,
222 "cost_per_1m_out_cached": 3.75,
223 "context_window": 1000000,
224 "default_max_tokens": 8000,
225 "can_reason": true,
226 "reasoning_levels": [
227 "none",
228 "minimal",
229 "low",
230 "medium",
231 "high",
232 "xhigh"
233 ],
234 "default_reasoning_effort": "medium",
235 "supports_attachments": true,
236 "options": {}
237 },
238 {
239 "id": "anthropic/claude-sonnet-4.6",
240 "name": "Claude Sonnet 4.6",
241 "cost_per_1m_in": 3,
242 "cost_per_1m_out": 15,
243 "cost_per_1m_in_cached": 0.3,
244 "cost_per_1m_out_cached": 3.75,
245 "context_window": 1000000,
246 "default_max_tokens": 8000,
247 "can_reason": true,
248 "reasoning_levels": [
249 "none",
250 "minimal",
251 "low",
252 "medium",
253 "high",
254 "xhigh"
255 ],
256 "default_reasoning_effort": "medium",
257 "supports_attachments": true,
258 "options": {}
259 },
260 {
261 "id": "cohere/command-a",
262 "name": "Command A",
263 "cost_per_1m_in": 2.5,
264 "cost_per_1m_out": 10,
265 "cost_per_1m_in_cached": 0,
266 "cost_per_1m_out_cached": 0,
267 "context_window": 256000,
268 "default_max_tokens": 8000,
269 "can_reason": false,
270 "supports_attachments": false,
271 "options": {}
272 },
273 {
274 "id": "deepseek/deepseek-v3",
275 "name": "DeepSeek V3 0324",
276 "cost_per_1m_in": 0.77,
277 "cost_per_1m_out": 0.77,
278 "cost_per_1m_in_cached": 0,
279 "cost_per_1m_out_cached": 0,
280 "context_window": 163840,
281 "default_max_tokens": 8000,
282 "can_reason": false,
283 "supports_attachments": false,
284 "options": {}
285 },
286 {
287 "id": "deepseek/deepseek-v3.1-terminus",
288 "name": "DeepSeek V3.1 Terminus",
289 "cost_per_1m_in": 0.27,
290 "cost_per_1m_out": 1,
291 "cost_per_1m_in_cached": 0.135,
292 "cost_per_1m_out_cached": 0,
293 "context_window": 131072,
294 "default_max_tokens": 8000,
295 "can_reason": true,
296 "reasoning_levels": [
297 "low",
298 "medium",
299 "high"
300 ],
301 "default_reasoning_effort": "medium",
302 "supports_attachments": false,
303 "options": {}
304 },
305 {
306 "id": "deepseek/deepseek-v3.2",
307 "name": "DeepSeek V3.2",
308 "cost_per_1m_in": 0.28,
309 "cost_per_1m_out": 0.42,
310 "cost_per_1m_in_cached": 0.028,
311 "cost_per_1m_out_cached": 0,
312 "context_window": 128000,
313 "default_max_tokens": 8000,
314 "can_reason": false,
315 "supports_attachments": false,
316 "options": {}
317 },
318 {
319 "id": "deepseek/deepseek-v3.2-thinking",
320 "name": "DeepSeek V3.2 Thinking",
321 "cost_per_1m_in": 0.28,
322 "cost_per_1m_out": 0.42,
323 "cost_per_1m_in_cached": 0.028,
324 "cost_per_1m_out_cached": 0,
325 "context_window": 128000,
326 "default_max_tokens": 8000,
327 "can_reason": true,
328 "reasoning_levels": [
329 "low",
330 "medium",
331 "high"
332 ],
333 "default_reasoning_effort": "medium",
334 "supports_attachments": false,
335 "options": {}
336 },
337 {
338 "id": "deepseek/deepseek-r1",
339 "name": "DeepSeek-R1",
340 "cost_per_1m_in": 1.35,
341 "cost_per_1m_out": 5.4,
342 "cost_per_1m_in_cached": 0,
343 "cost_per_1m_out_cached": 0,
344 "context_window": 128000,
345 "default_max_tokens": 8000,
346 "can_reason": true,
347 "reasoning_levels": [
348 "low",
349 "medium",
350 "high"
351 ],
352 "default_reasoning_effort": "medium",
353 "supports_attachments": false,
354 "options": {}
355 },
356 {
357 "id": "deepseek/deepseek-v3.1",
358 "name": "DeepSeek-V3.1",
359 "cost_per_1m_in": 0.5,
360 "cost_per_1m_out": 1.5,
361 "cost_per_1m_in_cached": 0,
362 "cost_per_1m_out_cached": 0,
363 "context_window": 163840,
364 "default_max_tokens": 8000,
365 "can_reason": true,
366 "reasoning_levels": [
367 "low",
368 "medium",
369 "high"
370 ],
371 "default_reasoning_effort": "medium",
372 "supports_attachments": false,
373 "options": {}
374 },
375 {
376 "id": "mistral/devstral-2",
377 "name": "Devstral 2",
378 "cost_per_1m_in": 0.4,
379 "cost_per_1m_out": 2,
380 "cost_per_1m_in_cached": 0,
381 "cost_per_1m_out_cached": 0,
382 "context_window": 256000,
383 "default_max_tokens": 8000,
384 "can_reason": false,
385 "supports_attachments": false,
386 "options": {}
387 },
388 {
389 "id": "mistral/devstral-small",
390 "name": "Devstral Small 1.1",
391 "cost_per_1m_in": 0.1,
392 "cost_per_1m_out": 0.3,
393 "cost_per_1m_in_cached": 0,
394 "cost_per_1m_out_cached": 0,
395 "context_window": 128000,
396 "default_max_tokens": 8000,
397 "can_reason": false,
398 "supports_attachments": false,
399 "options": {}
400 },
401 {
402 "id": "mistral/devstral-small-2",
403 "name": "Devstral Small 2",
404 "cost_per_1m_in": 0.1,
405 "cost_per_1m_out": 0.3,
406 "cost_per_1m_in_cached": 0,
407 "cost_per_1m_out_cached": 0,
408 "context_window": 256000,
409 "default_max_tokens": 8000,
410 "can_reason": false,
411 "supports_attachments": false,
412 "options": {}
413 },
414 {
415 "id": "zai/glm-4.5-air",
416 "name": "GLM 4.5 Air",
417 "cost_per_1m_in": 0.2,
418 "cost_per_1m_out": 1.1,
419 "cost_per_1m_in_cached": 0.03,
420 "cost_per_1m_out_cached": 0,
421 "context_window": 128000,
422 "default_max_tokens": 8000,
423 "can_reason": true,
424 "reasoning_levels": [
425 "low",
426 "medium",
427 "high"
428 ],
429 "default_reasoning_effort": "medium",
430 "supports_attachments": false,
431 "options": {}
432 },
433 {
434 "id": "zai/glm-4.5v",
435 "name": "GLM 4.5V",
436 "cost_per_1m_in": 0.6,
437 "cost_per_1m_out": 1.8,
438 "cost_per_1m_in_cached": 0.11,
439 "cost_per_1m_out_cached": 0,
440 "context_window": 66000,
441 "default_max_tokens": 8000,
442 "can_reason": false,
443 "supports_attachments": true,
444 "options": {}
445 },
446 {
447 "id": "zai/glm-4.6",
448 "name": "GLM 4.6",
449 "cost_per_1m_in": 0.6,
450 "cost_per_1m_out": 2.2,
451 "cost_per_1m_in_cached": 0.11,
452 "cost_per_1m_out_cached": 0,
453 "context_window": 200000,
454 "default_max_tokens": 8000,
455 "can_reason": true,
456 "reasoning_levels": [
457 "low",
458 "medium",
459 "high"
460 ],
461 "default_reasoning_effort": "medium",
462 "supports_attachments": false,
463 "options": {}
464 },
465 {
466 "id": "zai/glm-4.7",
467 "name": "GLM 4.7",
468 "cost_per_1m_in": 0.6,
469 "cost_per_1m_out": 2.2,
470 "cost_per_1m_in_cached": 0,
471 "cost_per_1m_out_cached": 0,
472 "context_window": 200000,
473 "default_max_tokens": 8000,
474 "can_reason": true,
475 "reasoning_levels": [
476 "low",
477 "medium",
478 "high"
479 ],
480 "default_reasoning_effort": "medium",
481 "supports_attachments": false,
482 "options": {}
483 },
484 {
485 "id": "zai/glm-4.7-flash",
486 "name": "GLM 4.7 Flash",
487 "cost_per_1m_in": 0.07,
488 "cost_per_1m_out": 0.4,
489 "cost_per_1m_in_cached": 0,
490 "cost_per_1m_out_cached": 0,
491 "context_window": 200000,
492 "default_max_tokens": 8000,
493 "can_reason": true,
494 "reasoning_levels": [
495 "low",
496 "medium",
497 "high"
498 ],
499 "default_reasoning_effort": "medium",
500 "supports_attachments": false,
501 "options": {}
502 },
503 {
504 "id": "zai/glm-4.7-flashx",
505 "name": "GLM 4.7 FlashX",
506 "cost_per_1m_in": 0.06,
507 "cost_per_1m_out": 0.4,
508 "cost_per_1m_in_cached": 0.01,
509 "cost_per_1m_out_cached": 0,
510 "context_window": 200000,
511 "default_max_tokens": 8000,
512 "can_reason": true,
513 "reasoning_levels": [
514 "low",
515 "medium",
516 "high"
517 ],
518 "default_reasoning_effort": "medium",
519 "supports_attachments": false,
520 "options": {}
521 },
522 {
523 "id": "zai/glm-5",
524 "name": "GLM 5",
525 "cost_per_1m_in": 1,
526 "cost_per_1m_out": 3.2,
527 "cost_per_1m_in_cached": 0.2,
528 "cost_per_1m_out_cached": 0,
529 "context_window": 202800,
530 "default_max_tokens": 8000,
531 "can_reason": true,
532 "reasoning_levels": [
533 "low",
534 "medium",
535 "high"
536 ],
537 "default_reasoning_effort": "medium",
538 "supports_attachments": false,
539 "options": {}
540 },
541 {
542 "id": "zai/glm-5-turbo",
543 "name": "GLM 5 Turbo",
544 "cost_per_1m_in": 1.2,
545 "cost_per_1m_out": 4,
546 "cost_per_1m_in_cached": 0.24,
547 "cost_per_1m_out_cached": 0,
548 "context_window": 202800,
549 "default_max_tokens": 8000,
550 "can_reason": true,
551 "reasoning_levels": [
552 "low",
553 "medium",
554 "high"
555 ],
556 "default_reasoning_effort": "medium",
557 "supports_attachments": false,
558 "options": {}
559 },
560 {
561 "id": "zai/glm-4.5",
562 "name": "GLM-4.5",
563 "cost_per_1m_in": 0.6,
564 "cost_per_1m_out": 2.2,
565 "cost_per_1m_in_cached": 0.11,
566 "cost_per_1m_out_cached": 0,
567 "context_window": 128000,
568 "default_max_tokens": 8000,
569 "can_reason": true,
570 "reasoning_levels": [
571 "low",
572 "medium",
573 "high"
574 ],
575 "default_reasoning_effort": "medium",
576 "supports_attachments": false,
577 "options": {}
578 },
579 {
580 "id": "zai/glm-4.6v",
581 "name": "GLM-4.6V",
582 "cost_per_1m_in": 0.3,
583 "cost_per_1m_out": 0.9,
584 "cost_per_1m_in_cached": 0.05,
585 "cost_per_1m_out_cached": 0,
586 "context_window": 128000,
587 "default_max_tokens": 8000,
588 "can_reason": true,
589 "reasoning_levels": [
590 "low",
591 "medium",
592 "high"
593 ],
594 "default_reasoning_effort": "medium",
595 "supports_attachments": true,
596 "options": {}
597 },
598 {
599 "id": "zai/glm-4.6v-flash",
600 "name": "GLM-4.6V-Flash",
601 "cost_per_1m_in": 0,
602 "cost_per_1m_out": 0,
603 "cost_per_1m_in_cached": 0,
604 "cost_per_1m_out_cached": 0,
605 "context_window": 128000,
606 "default_max_tokens": 8000,
607 "can_reason": true,
608 "reasoning_levels": [
609 "low",
610 "medium",
611 "high"
612 ],
613 "default_reasoning_effort": "medium",
614 "supports_attachments": true,
615 "options": {}
616 },
617 {
618 "id": "openai/gpt-5-chat",
619 "name": "GPT 5 Chat",
620 "cost_per_1m_in": 1.25,
621 "cost_per_1m_out": 10,
622 "cost_per_1m_in_cached": 0.125,
623 "cost_per_1m_out_cached": 0,
624 "context_window": 128000,
625 "default_max_tokens": 8000,
626 "can_reason": true,
627 "reasoning_levels": [
628 "low",
629 "medium",
630 "high"
631 ],
632 "default_reasoning_effort": "medium",
633 "supports_attachments": true,
634 "options": {}
635 },
636 {
637 "id": "openai/gpt-5.1-codex-max",
638 "name": "GPT 5.1 Codex Max",
639 "cost_per_1m_in": 1.25,
640 "cost_per_1m_out": 10,
641 "cost_per_1m_in_cached": 0.125,
642 "cost_per_1m_out_cached": 0,
643 "context_window": 400000,
644 "default_max_tokens": 8000,
645 "can_reason": true,
646 "reasoning_levels": [
647 "low",
648 "medium",
649 "high"
650 ],
651 "default_reasoning_effort": "medium",
652 "supports_attachments": true,
653 "options": {}
654 },
655 {
656 "id": "openai/gpt-5.1-codex-mini",
657 "name": "GPT 5.1 Codex Mini",
658 "cost_per_1m_in": 0.25,
659 "cost_per_1m_out": 2,
660 "cost_per_1m_in_cached": 0.025,
661 "cost_per_1m_out_cached": 0,
662 "context_window": 400000,
663 "default_max_tokens": 8000,
664 "can_reason": true,
665 "reasoning_levels": [
666 "low",
667 "medium",
668 "high"
669 ],
670 "default_reasoning_effort": "medium",
671 "supports_attachments": true,
672 "options": {}
673 },
674 {
675 "id": "openai/gpt-5.1-thinking",
676 "name": "GPT 5.1 Thinking",
677 "cost_per_1m_in": 1.25,
678 "cost_per_1m_out": 10,
679 "cost_per_1m_in_cached": 0.125,
680 "cost_per_1m_out_cached": 0,
681 "context_window": 400000,
682 "default_max_tokens": 8000,
683 "can_reason": true,
684 "reasoning_levels": [
685 "low",
686 "medium",
687 "high"
688 ],
689 "default_reasoning_effort": "medium",
690 "supports_attachments": true,
691 "options": {}
692 },
693 {
694 "id": "openai/gpt-5.2",
695 "name": "GPT 5.2",
696 "cost_per_1m_in": 1.75,
697 "cost_per_1m_out": 14,
698 "cost_per_1m_in_cached": 0.175,
699 "cost_per_1m_out_cached": 0,
700 "context_window": 400000,
701 "default_max_tokens": 8000,
702 "can_reason": true,
703 "reasoning_levels": [
704 "low",
705 "medium",
706 "high"
707 ],
708 "default_reasoning_effort": "medium",
709 "supports_attachments": true,
710 "options": {}
711 },
712 {
713 "id": "openai/gpt-5.2-pro",
714 "name": "GPT 5.2 Pro",
715 "cost_per_1m_in": 21,
716 "cost_per_1m_out": 168,
717 "cost_per_1m_in_cached": 0,
718 "cost_per_1m_out_cached": 0,
719 "context_window": 400000,
720 "default_max_tokens": 8000,
721 "can_reason": true,
722 "reasoning_levels": [
723 "low",
724 "medium",
725 "high"
726 ],
727 "default_reasoning_effort": "medium",
728 "supports_attachments": true,
729 "options": {}
730 },
731 {
732 "id": "openai/gpt-5.2-chat",
733 "name": "GPT 5.2 Chat",
734 "cost_per_1m_in": 1.75,
735 "cost_per_1m_out": 14,
736 "cost_per_1m_in_cached": 0.175,
737 "cost_per_1m_out_cached": 0,
738 "context_window": 128000,
739 "default_max_tokens": 8000,
740 "can_reason": true,
741 "reasoning_levels": [
742 "low",
743 "medium",
744 "high"
745 ],
746 "default_reasoning_effort": "medium",
747 "supports_attachments": true,
748 "options": {}
749 },
750 {
751 "id": "openai/gpt-5.2-codex",
752 "name": "GPT 5.2 Codex",
753 "cost_per_1m_in": 1.75,
754 "cost_per_1m_out": 14,
755 "cost_per_1m_in_cached": 0.175,
756 "cost_per_1m_out_cached": 0,
757 "context_window": 400000,
758 "default_max_tokens": 8000,
759 "can_reason": true,
760 "reasoning_levels": [
761 "low",
762 "medium",
763 "high"
764 ],
765 "default_reasoning_effort": "medium",
766 "supports_attachments": true,
767 "options": {}
768 },
769 {
770 "id": "openai/gpt-5.3-codex",
771 "name": "GPT 5.3 Codex",
772 "cost_per_1m_in": 1.75,
773 "cost_per_1m_out": 14,
774 "cost_per_1m_in_cached": 0.175,
775 "cost_per_1m_out_cached": 0,
776 "context_window": 400000,
777 "default_max_tokens": 8000,
778 "can_reason": true,
779 "reasoning_levels": [
780 "low",
781 "medium",
782 "high"
783 ],
784 "default_reasoning_effort": "medium",
785 "supports_attachments": true,
786 "options": {}
787 },
788 {
789 "id": "openai/gpt-5.4",
790 "name": "GPT 5.4",
791 "cost_per_1m_in": 2.5,
792 "cost_per_1m_out": 15,
793 "cost_per_1m_in_cached": 0.25,
794 "cost_per_1m_out_cached": 0,
795 "context_window": 1050000,
796 "default_max_tokens": 8000,
797 "can_reason": true,
798 "reasoning_levels": [
799 "low",
800 "medium",
801 "high"
802 ],
803 "default_reasoning_effort": "medium",
804 "supports_attachments": true,
805 "options": {}
806 },
807 {
808 "id": "openai/gpt-5.4-mini",
809 "name": "GPT 5.4 Mini",
810 "cost_per_1m_in": 0.75,
811 "cost_per_1m_out": 4.5,
812 "cost_per_1m_in_cached": 0.075,
813 "cost_per_1m_out_cached": 0,
814 "context_window": 400000,
815 "default_max_tokens": 8000,
816 "can_reason": true,
817 "reasoning_levels": [
818 "low",
819 "medium",
820 "high"
821 ],
822 "default_reasoning_effort": "medium",
823 "supports_attachments": true,
824 "options": {}
825 },
826 {
827 "id": "openai/gpt-5.4-nano",
828 "name": "GPT 5.4 Nano",
829 "cost_per_1m_in": 0.2,
830 "cost_per_1m_out": 1.25,
831 "cost_per_1m_in_cached": 0.02,
832 "cost_per_1m_out_cached": 0,
833 "context_window": 400000,
834 "default_max_tokens": 8000,
835 "can_reason": true,
836 "reasoning_levels": [
837 "low",
838 "medium",
839 "high"
840 ],
841 "default_reasoning_effort": "medium",
842 "supports_attachments": true,
843 "options": {}
844 },
845 {
846 "id": "openai/gpt-5.4-pro",
847 "name": "GPT 5.4 Pro",
848 "cost_per_1m_in": 30,
849 "cost_per_1m_out": 180,
850 "cost_per_1m_in_cached": 0,
851 "cost_per_1m_out_cached": 0,
852 "context_window": 1050000,
853 "default_max_tokens": 8000,
854 "can_reason": true,
855 "reasoning_levels": [
856 "low",
857 "medium",
858 "high"
859 ],
860 "default_reasoning_effort": "medium",
861 "supports_attachments": true,
862 "options": {}
863 },
864 {
865 "id": "openai/gpt-4-turbo",
866 "name": "GPT-4 Turbo",
867 "cost_per_1m_in": 10,
868 "cost_per_1m_out": 30,
869 "cost_per_1m_in_cached": 0,
870 "cost_per_1m_out_cached": 0,
871 "context_window": 128000,
872 "default_max_tokens": 4096,
873 "can_reason": false,
874 "supports_attachments": true,
875 "options": {}
876 },
877 {
878 "id": "openai/gpt-4.1",
879 "name": "GPT-4.1",
880 "cost_per_1m_in": 2,
881 "cost_per_1m_out": 8,
882 "cost_per_1m_in_cached": 0.5,
883 "cost_per_1m_out_cached": 0,
884 "context_window": 1047576,
885 "default_max_tokens": 8000,
886 "can_reason": false,
887 "supports_attachments": true,
888 "options": {}
889 },
890 {
891 "id": "openai/gpt-4.1-mini",
892 "name": "GPT-4.1 mini",
893 "cost_per_1m_in": 0.4,
894 "cost_per_1m_out": 1.6,
895 "cost_per_1m_in_cached": 0.1,
896 "cost_per_1m_out_cached": 0,
897 "context_window": 1047576,
898 "default_max_tokens": 8000,
899 "can_reason": false,
900 "supports_attachments": true,
901 "options": {}
902 },
903 {
904 "id": "openai/gpt-4.1-nano",
905 "name": "GPT-4.1 nano",
906 "cost_per_1m_in": 0.1,
907 "cost_per_1m_out": 0.4,
908 "cost_per_1m_in_cached": 0.025,
909 "cost_per_1m_out_cached": 0,
910 "context_window": 1047576,
911 "default_max_tokens": 8000,
912 "can_reason": false,
913 "supports_attachments": true,
914 "options": {}
915 },
916 {
917 "id": "openai/gpt-4o",
918 "name": "GPT-4o",
919 "cost_per_1m_in": 2.5,
920 "cost_per_1m_out": 10,
921 "cost_per_1m_in_cached": 1.25,
922 "cost_per_1m_out_cached": 0,
923 "context_window": 128000,
924 "default_max_tokens": 8000,
925 "can_reason": false,
926 "supports_attachments": true,
927 "options": {}
928 },
929 {
930 "id": "openai/gpt-4o-mini",
931 "name": "GPT-4o mini",
932 "cost_per_1m_in": 0.15,
933 "cost_per_1m_out": 0.6,
934 "cost_per_1m_in_cached": 0.075,
935 "cost_per_1m_out_cached": 0,
936 "context_window": 128000,
937 "default_max_tokens": 8000,
938 "can_reason": false,
939 "supports_attachments": true,
940 "options": {}
941 },
942 {
943 "id": "openai/gpt-5",
944 "name": "GPT-5",
945 "cost_per_1m_in": 1.25,
946 "cost_per_1m_out": 10,
947 "cost_per_1m_in_cached": 0.125,
948 "cost_per_1m_out_cached": 0,
949 "context_window": 400000,
950 "default_max_tokens": 8000,
951 "can_reason": true,
952 "reasoning_levels": [
953 "low",
954 "medium",
955 "high"
956 ],
957 "default_reasoning_effort": "medium",
958 "supports_attachments": true,
959 "options": {}
960 },
961 {
962 "id": "openai/gpt-5-mini",
963 "name": "GPT-5 mini",
964 "cost_per_1m_in": 0.25,
965 "cost_per_1m_out": 2,
966 "cost_per_1m_in_cached": 0.025,
967 "cost_per_1m_out_cached": 0,
968 "context_window": 400000,
969 "default_max_tokens": 8000,
970 "can_reason": true,
971 "reasoning_levels": [
972 "low",
973 "medium",
974 "high"
975 ],
976 "default_reasoning_effort": "medium",
977 "supports_attachments": true,
978 "options": {}
979 },
980 {
981 "id": "openai/gpt-5-nano",
982 "name": "GPT-5 nano",
983 "cost_per_1m_in": 0.05,
984 "cost_per_1m_out": 0.4,
985 "cost_per_1m_in_cached": 0.005,
986 "cost_per_1m_out_cached": 0,
987 "context_window": 400000,
988 "default_max_tokens": 8000,
989 "can_reason": true,
990 "reasoning_levels": [
991 "low",
992 "medium",
993 "high"
994 ],
995 "default_reasoning_effort": "medium",
996 "supports_attachments": true,
997 "options": {}
998 },
999 {
1000 "id": "openai/gpt-5-pro",
1001 "name": "GPT-5 pro",
1002 "cost_per_1m_in": 15,
1003 "cost_per_1m_out": 120,
1004 "cost_per_1m_in_cached": 0,
1005 "cost_per_1m_out_cached": 0,
1006 "context_window": 400000,
1007 "default_max_tokens": 8000,
1008 "can_reason": true,
1009 "reasoning_levels": [
1010 "low",
1011 "medium",
1012 "high"
1013 ],
1014 "default_reasoning_effort": "medium",
1015 "supports_attachments": true,
1016 "options": {}
1017 },
1018 {
1019 "id": "openai/gpt-5-codex",
1020 "name": "GPT-5-Codex",
1021 "cost_per_1m_in": 1.25,
1022 "cost_per_1m_out": 10,
1023 "cost_per_1m_in_cached": 0.125,
1024 "cost_per_1m_out_cached": 0,
1025 "context_window": 400000,
1026 "default_max_tokens": 8000,
1027 "can_reason": true,
1028 "reasoning_levels": [
1029 "low",
1030 "medium",
1031 "high"
1032 ],
1033 "default_reasoning_effort": "medium",
1034 "supports_attachments": false,
1035 "options": {}
1036 },
1037 {
1038 "id": "openai/gpt-5.1-instant",
1039 "name": "GPT-5.1 Instant",
1040 "cost_per_1m_in": 1.25,
1041 "cost_per_1m_out": 10,
1042 "cost_per_1m_in_cached": 0.125,
1043 "cost_per_1m_out_cached": 0,
1044 "context_window": 128000,
1045 "default_max_tokens": 8000,
1046 "can_reason": true,
1047 "reasoning_levels": [
1048 "low",
1049 "medium",
1050 "high"
1051 ],
1052 "default_reasoning_effort": "medium",
1053 "supports_attachments": true,
1054 "options": {}
1055 },
1056 {
1057 "id": "openai/gpt-5.1-codex",
1058 "name": "GPT-5.1-Codex",
1059 "cost_per_1m_in": 1.25,
1060 "cost_per_1m_out": 10,
1061 "cost_per_1m_in_cached": 0.125,
1062 "cost_per_1m_out_cached": 0,
1063 "context_window": 400000,
1064 "default_max_tokens": 8000,
1065 "can_reason": true,
1066 "reasoning_levels": [
1067 "low",
1068 "medium",
1069 "high"
1070 ],
1071 "default_reasoning_effort": "medium",
1072 "supports_attachments": true,
1073 "options": {}
1074 },
1075 {
1076 "id": "openai/gpt-5.3-chat",
1077 "name": "GPT-5.3 Chat",
1078 "cost_per_1m_in": 1.75,
1079 "cost_per_1m_out": 14,
1080 "cost_per_1m_in_cached": 0.175,
1081 "cost_per_1m_out_cached": 0,
1082 "context_window": 128000,
1083 "default_max_tokens": 8000,
1084 "can_reason": true,
1085 "reasoning_levels": [
1086 "low",
1087 "medium",
1088 "high"
1089 ],
1090 "default_reasoning_effort": "medium",
1091 "supports_attachments": true,
1092 "options": {}
1093 },
1094 {
1095 "id": "google/gemini-2.0-flash",
1096 "name": "Gemini 2.0 Flash",
1097 "cost_per_1m_in": 0.15,
1098 "cost_per_1m_out": 0.6,
1099 "cost_per_1m_in_cached": 0.025,
1100 "cost_per_1m_out_cached": 0,
1101 "context_window": 1048576,
1102 "default_max_tokens": 8000,
1103 "can_reason": false,
1104 "supports_attachments": true,
1105 "options": {}
1106 },
1107 {
1108 "id": "google/gemini-2.0-flash-lite",
1109 "name": "Gemini 2.0 Flash Lite",
1110 "cost_per_1m_in": 0.075,
1111 "cost_per_1m_out": 0.3,
1112 "cost_per_1m_in_cached": 0.02,
1113 "cost_per_1m_out_cached": 0,
1114 "context_window": 1048576,
1115 "default_max_tokens": 8000,
1116 "can_reason": false,
1117 "supports_attachments": true,
1118 "options": {}
1119 },
1120 {
1121 "id": "google/gemini-2.5-flash",
1122 "name": "Gemini 2.5 Flash",
1123 "cost_per_1m_in": 0.3,
1124 "cost_per_1m_out": 2.5,
1125 "cost_per_1m_in_cached": 0.03,
1126 "cost_per_1m_out_cached": 0,
1127 "context_window": 1000000,
1128 "default_max_tokens": 8000,
1129 "can_reason": true,
1130 "reasoning_levels": [
1131 "low",
1132 "medium",
1133 "high"
1134 ],
1135 "default_reasoning_effort": "medium",
1136 "supports_attachments": true,
1137 "options": {}
1138 },
1139 {
1140 "id": "google/gemini-2.5-flash-lite",
1141 "name": "Gemini 2.5 Flash Lite",
1142 "cost_per_1m_in": 0.1,
1143 "cost_per_1m_out": 0.4,
1144 "cost_per_1m_in_cached": 0.01,
1145 "cost_per_1m_out_cached": 0,
1146 "context_window": 1048576,
1147 "default_max_tokens": 8000,
1148 "can_reason": true,
1149 "reasoning_levels": [
1150 "low",
1151 "medium",
1152 "high"
1153 ],
1154 "default_reasoning_effort": "medium",
1155 "supports_attachments": true,
1156 "options": {}
1157 },
1158 {
1159 "id": "google/gemini-2.5-pro",
1160 "name": "Gemini 2.5 Pro",
1161 "cost_per_1m_in": 1.25,
1162 "cost_per_1m_out": 10,
1163 "cost_per_1m_in_cached": 0.125,
1164 "cost_per_1m_out_cached": 0,
1165 "context_window": 1048576,
1166 "default_max_tokens": 8000,
1167 "can_reason": true,
1168 "reasoning_levels": [
1169 "low",
1170 "medium",
1171 "high"
1172 ],
1173 "default_reasoning_effort": "medium",
1174 "supports_attachments": true,
1175 "options": {}
1176 },
1177 {
1178 "id": "google/gemini-3-flash",
1179 "name": "Gemini 3 Flash",
1180 "cost_per_1m_in": 0.5,
1181 "cost_per_1m_out": 3,
1182 "cost_per_1m_in_cached": 0.05,
1183 "cost_per_1m_out_cached": 0,
1184 "context_window": 1000000,
1185 "default_max_tokens": 8000,
1186 "can_reason": true,
1187 "reasoning_levels": [
1188 "low",
1189 "medium",
1190 "high"
1191 ],
1192 "default_reasoning_effort": "medium",
1193 "supports_attachments": true,
1194 "options": {}
1195 },
1196 {
1197 "id": "google/gemini-3-pro-preview",
1198 "name": "Gemini 3 Pro Preview",
1199 "cost_per_1m_in": 2,
1200 "cost_per_1m_out": 12,
1201 "cost_per_1m_in_cached": 0.2,
1202 "cost_per_1m_out_cached": 0,
1203 "context_window": 1000000,
1204 "default_max_tokens": 8000,
1205 "can_reason": true,
1206 "reasoning_levels": [
1207 "low",
1208 "medium",
1209 "high"
1210 ],
1211 "default_reasoning_effort": "medium",
1212 "supports_attachments": true,
1213 "options": {}
1214 },
1215 {
1216 "id": "google/gemini-3.1-flash-lite-preview",
1217 "name": "Gemini 3.1 Flash Lite Preview",
1218 "cost_per_1m_in": 0.25,
1219 "cost_per_1m_out": 1.5,
1220 "cost_per_1m_in_cached": 0,
1221 "cost_per_1m_out_cached": 0,
1222 "context_window": 1000000,
1223 "default_max_tokens": 8000,
1224 "can_reason": true,
1225 "reasoning_levels": [
1226 "low",
1227 "medium",
1228 "high"
1229 ],
1230 "default_reasoning_effort": "medium",
1231 "supports_attachments": true,
1232 "options": {}
1233 },
1234 {
1235 "id": "google/gemini-3.1-pro-preview",
1236 "name": "Gemini 3.1 Pro Preview",
1237 "cost_per_1m_in": 2,
1238 "cost_per_1m_out": 12,
1239 "cost_per_1m_in_cached": 0.2,
1240 "cost_per_1m_out_cached": 0,
1241 "context_window": 1000000,
1242 "default_max_tokens": 8000,
1243 "can_reason": true,
1244 "reasoning_levels": [
1245 "low",
1246 "medium",
1247 "high"
1248 ],
1249 "default_reasoning_effort": "medium",
1250 "supports_attachments": true,
1251 "options": {}
1252 },
1253 {
1254 "id": "xai/grok-2-vision",
1255 "name": "Grok 2 Vision",
1256 "cost_per_1m_in": 2,
1257 "cost_per_1m_out": 10,
1258 "cost_per_1m_in_cached": 0,
1259 "cost_per_1m_out_cached": 0,
1260 "context_window": 32768,
1261 "default_max_tokens": 8000,
1262 "can_reason": false,
1263 "supports_attachments": true,
1264 "options": {}
1265 },
1266 {
1267 "id": "xai/grok-3",
1268 "name": "Grok 3 Beta",
1269 "cost_per_1m_in": 3,
1270 "cost_per_1m_out": 15,
1271 "cost_per_1m_in_cached": 0.75,
1272 "cost_per_1m_out_cached": 0,
1273 "context_window": 131072,
1274 "default_max_tokens": 8000,
1275 "can_reason": false,
1276 "supports_attachments": false,
1277 "options": {}
1278 },
1279 {
1280 "id": "xai/grok-3-fast",
1281 "name": "Grok 3 Fast Beta",
1282 "cost_per_1m_in": 5,
1283 "cost_per_1m_out": 25,
1284 "cost_per_1m_in_cached": 1.25,
1285 "cost_per_1m_out_cached": 0,
1286 "context_window": 131072,
1287 "default_max_tokens": 8000,
1288 "can_reason": false,
1289 "supports_attachments": false,
1290 "options": {}
1291 },
1292 {
1293 "id": "xai/grok-3-mini",
1294 "name": "Grok 3 Mini Beta",
1295 "cost_per_1m_in": 0.3,
1296 "cost_per_1m_out": 0.5,
1297 "cost_per_1m_in_cached": 0.075,
1298 "cost_per_1m_out_cached": 0,
1299 "context_window": 131072,
1300 "default_max_tokens": 8000,
1301 "can_reason": false,
1302 "supports_attachments": false,
1303 "options": {}
1304 },
1305 {
1306 "id": "xai/grok-3-mini-fast",
1307 "name": "Grok 3 Mini Fast Beta",
1308 "cost_per_1m_in": 0.6,
1309 "cost_per_1m_out": 4,
1310 "cost_per_1m_in_cached": 0,
1311 "cost_per_1m_out_cached": 0,
1312 "context_window": 131072,
1313 "default_max_tokens": 8000,
1314 "can_reason": false,
1315 "supports_attachments": false,
1316 "options": {}
1317 },
1318 {
1319 "id": "xai/grok-4",
1320 "name": "Grok 4",
1321 "cost_per_1m_in": 3,
1322 "cost_per_1m_out": 15,
1323 "cost_per_1m_in_cached": 0.75,
1324 "cost_per_1m_out_cached": 0,
1325 "context_window": 256000,
1326 "default_max_tokens": 8000,
1327 "can_reason": true,
1328 "reasoning_levels": [
1329 "low",
1330 "medium",
1331 "high"
1332 ],
1333 "default_reasoning_effort": "medium",
1334 "supports_attachments": true,
1335 "options": {}
1336 },
1337 {
1338 "id": "xai/grok-4-fast-non-reasoning",
1339 "name": "Grok 4 Fast Non-Reasoning",
1340 "cost_per_1m_in": 0.2,
1341 "cost_per_1m_out": 0.5,
1342 "cost_per_1m_in_cached": 0.05,
1343 "cost_per_1m_out_cached": 0,
1344 "context_window": 2000000,
1345 "default_max_tokens": 8000,
1346 "can_reason": false,
1347 "supports_attachments": false,
1348 "options": {}
1349 },
1350 {
1351 "id": "xai/grok-4-fast-reasoning",
1352 "name": "Grok 4 Fast Reasoning",
1353 "cost_per_1m_in": 0.2,
1354 "cost_per_1m_out": 0.5,
1355 "cost_per_1m_in_cached": 0.05,
1356 "cost_per_1m_out_cached": 0,
1357 "context_window": 2000000,
1358 "default_max_tokens": 8000,
1359 "can_reason": true,
1360 "reasoning_levels": [
1361 "low",
1362 "medium",
1363 "high"
1364 ],
1365 "default_reasoning_effort": "medium",
1366 "supports_attachments": false,
1367 "options": {}
1368 },
1369 {
1370 "id": "xai/grok-4.1-fast-non-reasoning",
1371 "name": "Grok 4.1 Fast Non-Reasoning",
1372 "cost_per_1m_in": 0.2,
1373 "cost_per_1m_out": 0.5,
1374 "cost_per_1m_in_cached": 0.05,
1375 "cost_per_1m_out_cached": 0,
1376 "context_window": 2000000,
1377 "default_max_tokens": 8000,
1378 "can_reason": false,
1379 "supports_attachments": false,
1380 "options": {}
1381 },
1382 {
1383 "id": "xai/grok-4.1-fast-reasoning",
1384 "name": "Grok 4.1 Fast Reasoning",
1385 "cost_per_1m_in": 0.2,
1386 "cost_per_1m_out": 0.5,
1387 "cost_per_1m_in_cached": 0.05,
1388 "cost_per_1m_out_cached": 0,
1389 "context_window": 2000000,
1390 "default_max_tokens": 8000,
1391 "can_reason": true,
1392 "reasoning_levels": [
1393 "low",
1394 "medium",
1395 "high"
1396 ],
1397 "default_reasoning_effort": "medium",
1398 "supports_attachments": false,
1399 "options": {}
1400 },
1401 {
1402 "id": "xai/grok-4.20-non-reasoning-beta",
1403 "name": "Grok 4.20 Beta Non-Reasoning",
1404 "cost_per_1m_in": 2,
1405 "cost_per_1m_out": 6,
1406 "cost_per_1m_in_cached": 0.2,
1407 "cost_per_1m_out_cached": 0,
1408 "context_window": 2000000,
1409 "default_max_tokens": 8000,
1410 "can_reason": false,
1411 "supports_attachments": true,
1412 "options": {}
1413 },
1414 {
1415 "id": "xai/grok-4.20-reasoning-beta",
1416 "name": "Grok 4.20 Beta Reasoning",
1417 "cost_per_1m_in": 2,
1418 "cost_per_1m_out": 6,
1419 "cost_per_1m_in_cached": 0.2,
1420 "cost_per_1m_out_cached": 0,
1421 "context_window": 2000000,
1422 "default_max_tokens": 8000,
1423 "can_reason": true,
1424 "reasoning_levels": [
1425 "low",
1426 "medium",
1427 "high"
1428 ],
1429 "default_reasoning_effort": "medium",
1430 "supports_attachments": true,
1431 "options": {}
1432 },
1433 {
1434 "id": "xai/grok-4.20-multi-agent-beta",
1435 "name": "Grok 4.20 Multi Agent Beta",
1436 "cost_per_1m_in": 2,
1437 "cost_per_1m_out": 6,
1438 "cost_per_1m_in_cached": 0.2,
1439 "cost_per_1m_out_cached": 0,
1440 "context_window": 2000000,
1441 "default_max_tokens": 8000,
1442 "can_reason": true,
1443 "reasoning_levels": [
1444 "low",
1445 "medium",
1446 "high"
1447 ],
1448 "default_reasoning_effort": "medium",
1449 "supports_attachments": false,
1450 "options": {}
1451 },
1452 {
1453 "id": "xai/grok-4.20-multi-agent",
1454 "name": "Grok 4.20 Multi-Agent",
1455 "cost_per_1m_in": 2,
1456 "cost_per_1m_out": 6,
1457 "cost_per_1m_in_cached": 0.2,
1458 "cost_per_1m_out_cached": 0,
1459 "context_window": 2000000,
1460 "default_max_tokens": 8000,
1461 "can_reason": true,
1462 "reasoning_levels": [
1463 "low",
1464 "medium",
1465 "high"
1466 ],
1467 "default_reasoning_effort": "medium",
1468 "supports_attachments": false,
1469 "options": {}
1470 },
1471 {
1472 "id": "xai/grok-4.20-non-reasoning",
1473 "name": "Grok 4.20 Non-Reasoning",
1474 "cost_per_1m_in": 2,
1475 "cost_per_1m_out": 6,
1476 "cost_per_1m_in_cached": 0.2,
1477 "cost_per_1m_out_cached": 0,
1478 "context_window": 2000000,
1479 "default_max_tokens": 8000,
1480 "can_reason": false,
1481 "supports_attachments": true,
1482 "options": {}
1483 },
1484 {
1485 "id": "xai/grok-4.20-reasoning",
1486 "name": "Grok 4.20 Reasoning",
1487 "cost_per_1m_in": 2,
1488 "cost_per_1m_out": 6,
1489 "cost_per_1m_in_cached": 0.2,
1490 "cost_per_1m_out_cached": 0,
1491 "context_window": 2000000,
1492 "default_max_tokens": 8000,
1493 "can_reason": true,
1494 "reasoning_levels": [
1495 "low",
1496 "medium",
1497 "high"
1498 ],
1499 "default_reasoning_effort": "medium",
1500 "supports_attachments": true,
1501 "options": {}
1502 },
1503 {
1504 "id": "xai/grok-code-fast-1",
1505 "name": "Grok Code Fast 1",
1506 "cost_per_1m_in": 0.2,
1507 "cost_per_1m_out": 1.5,
1508 "cost_per_1m_in_cached": 0.02,
1509 "cost_per_1m_out_cached": 0,
1510 "context_window": 256000,
1511 "default_max_tokens": 8000,
1512 "can_reason": true,
1513 "reasoning_levels": [
1514 "low",
1515 "medium",
1516 "high"
1517 ],
1518 "default_reasoning_effort": "medium",
1519 "supports_attachments": false,
1520 "options": {}
1521 },
1522 {
1523 "id": "prime-intellect/intellect-3",
1524 "name": "INTELLECT 3",
1525 "cost_per_1m_in": 0.2,
1526 "cost_per_1m_out": 1.1,
1527 "cost_per_1m_in_cached": 0,
1528 "cost_per_1m_out_cached": 0,
1529 "context_window": 131072,
1530 "default_max_tokens": 8000,
1531 "can_reason": true,
1532 "reasoning_levels": [
1533 "low",
1534 "medium",
1535 "high"
1536 ],
1537 "default_reasoning_effort": "medium",
1538 "supports_attachments": false,
1539 "options": {}
1540 },
1541 {
1542 "id": "moonshotai/kimi-k2",
1543 "name": "Kimi K2",
1544 "cost_per_1m_in": 0.6,
1545 "cost_per_1m_out": 2.5,
1546 "cost_per_1m_in_cached": 0.15,
1547 "cost_per_1m_out_cached": 0,
1548 "context_window": 131072,
1549 "default_max_tokens": 8000,
1550 "can_reason": false,
1551 "supports_attachments": false,
1552 "options": {}
1553 },
1554 {
1555 "id": "moonshotai/kimi-k2-0905",
1556 "name": "Kimi K2 0905",
1557 "cost_per_1m_in": 0.6,
1558 "cost_per_1m_out": 2.5,
1559 "cost_per_1m_in_cached": 0.15,
1560 "cost_per_1m_out_cached": 0,
1561 "context_window": 256000,
1562 "default_max_tokens": 8000,
1563 "can_reason": false,
1564 "supports_attachments": false,
1565 "options": {}
1566 },
1567 {
1568 "id": "moonshotai/kimi-k2-thinking",
1569 "name": "Kimi K2 Thinking",
1570 "cost_per_1m_in": 0.6,
1571 "cost_per_1m_out": 2.5,
1572 "cost_per_1m_in_cached": 0.15,
1573 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1575 "default_max_tokens": 8000,
1576 "can_reason": true,
1577 "reasoning_levels": [
1578 "low",
1579 "medium",
1580 "high"
1581 ],
1582 "default_reasoning_effort": "medium",
1583 "supports_attachments": false,
1584 "options": {}
1585 },
1586 {
1587 "id": "moonshotai/kimi-k2-thinking-turbo",
1588 "name": "Kimi K2 Thinking Turbo",
1589 "cost_per_1m_in": 1.15,
1590 "cost_per_1m_out": 8,
1591 "cost_per_1m_in_cached": 0.15,
1592 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1594 "default_max_tokens": 8000,
1595 "can_reason": true,
1596 "reasoning_levels": [
1597 "low",
1598 "medium",
1599 "high"
1600 ],
1601 "default_reasoning_effort": "medium",
1602 "supports_attachments": false,
1603 "options": {}
1604 },
1605 {
1606 "id": "moonshotai/kimi-k2-turbo",
1607 "name": "Kimi K2 Turbo",
1608 "cost_per_1m_in": 1.15,
1609 "cost_per_1m_out": 8,
1610 "cost_per_1m_in_cached": 0.15,
1611 "cost_per_1m_out_cached": 0,
1612 "context_window": 256000,
1613 "default_max_tokens": 8000,
1614 "can_reason": false,
1615 "supports_attachments": false,
1616 "options": {}
1617 },
1618 {
1619 "id": "moonshotai/kimi-k2.5",
1620 "name": "Kimi K2.5",
1621 "cost_per_1m_in": 0.6,
1622 "cost_per_1m_out": 3,
1623 "cost_per_1m_in_cached": 0.1,
1624 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1626 "default_max_tokens": 8000,
1627 "can_reason": true,
1628 "reasoning_levels": [
1629 "low",
1630 "medium",
1631 "high"
1632 ],
1633 "default_reasoning_effort": "medium",
1634 "supports_attachments": true,
1635 "options": {}
1636 },
1637 {
1638 "id": "meta/llama-3.1-70b",
1639 "name": "Llama 3.1 70B Instruct",
1640 "cost_per_1m_in": 0.72,
1641 "cost_per_1m_out": 0.72,
1642 "cost_per_1m_in_cached": 0,
1643 "cost_per_1m_out_cached": 0,
1644 "context_window": 128000,
1645 "default_max_tokens": 8000,
1646 "can_reason": false,
1647 "supports_attachments": false,
1648 "options": {}
1649 },
1650 {
1651 "id": "meta/llama-3.1-8b",
1652 "name": "Llama 3.1 8B Instruct",
1653 "cost_per_1m_in": 0.1,
1654 "cost_per_1m_out": 0.1,
1655 "cost_per_1m_in_cached": 0.1,
1656 "cost_per_1m_out_cached": 0,
1657 "context_window": 128000,
1658 "default_max_tokens": 8000,
1659 "can_reason": false,
1660 "supports_attachments": false,
1661 "options": {}
1662 },
1663 {
1664 "id": "meta/llama-3.2-11b",
1665 "name": "Llama 3.2 11B Vision Instruct",
1666 "cost_per_1m_in": 0.16,
1667 "cost_per_1m_out": 0.16,
1668 "cost_per_1m_in_cached": 0,
1669 "cost_per_1m_out_cached": 0,
1670 "context_window": 128000,
1671 "default_max_tokens": 8000,
1672 "can_reason": false,
1673 "supports_attachments": true,
1674 "options": {}
1675 },
1676 {
1677 "id": "meta/llama-3.2-90b",
1678 "name": "Llama 3.2 90B Vision Instruct",
1679 "cost_per_1m_in": 0.72,
1680 "cost_per_1m_out": 0.72,
1681 "cost_per_1m_in_cached": 0,
1682 "cost_per_1m_out_cached": 0,
1683 "context_window": 128000,
1684 "default_max_tokens": 8000,
1685 "can_reason": false,
1686 "supports_attachments": true,
1687 "options": {}
1688 },
1689 {
1690 "id": "meta/llama-3.3-70b",
1691 "name": "Llama 3.3 70B Instruct",
1692 "cost_per_1m_in": 0.72,
1693 "cost_per_1m_out": 0.72,
1694 "cost_per_1m_in_cached": 0,
1695 "cost_per_1m_out_cached": 0,
1696 "context_window": 128000,
1697 "default_max_tokens": 8000,
1698 "can_reason": false,
1699 "supports_attachments": false,
1700 "options": {}
1701 },
1702 {
1703 "id": "meta/llama-4-maverick",
1704 "name": "Llama 4 Maverick 17B Instruct",
1705 "cost_per_1m_in": 0.24,
1706 "cost_per_1m_out": 0.97,
1707 "cost_per_1m_in_cached": 0,
1708 "cost_per_1m_out_cached": 0,
1709 "context_window": 128000,
1710 "default_max_tokens": 8000,
1711 "can_reason": false,
1712 "supports_attachments": true,
1713 "options": {}
1714 },
1715 {
1716 "id": "meta/llama-4-scout",
1717 "name": "Llama 4 Scout 17B Instruct",
1718 "cost_per_1m_in": 0.17,
1719 "cost_per_1m_out": 0.66,
1720 "cost_per_1m_in_cached": 0,
1721 "cost_per_1m_out_cached": 0,
1722 "context_window": 128000,
1723 "default_max_tokens": 8000,
1724 "can_reason": false,
1725 "supports_attachments": true,
1726 "options": {}
1727 },
1728 {
1729 "id": "meituan/longcat-flash-chat",
1730 "name": "LongCat Flash Chat",
1731 "cost_per_1m_in": 0,
1732 "cost_per_1m_out": 0,
1733 "cost_per_1m_in_cached": 0,
1734 "cost_per_1m_out_cached": 0,
1735 "context_window": 128000,
1736 "default_max_tokens": 8000,
1737 "can_reason": false,
1738 "supports_attachments": false,
1739 "options": {}
1740 },
1741 {
1742 "id": "meituan/longcat-flash-thinking",
1743 "name": "LongCat Flash Thinking",
1744 "cost_per_1m_in": 0.15,
1745 "cost_per_1m_out": 1.5,
1746 "cost_per_1m_in_cached": 0,
1747 "cost_per_1m_out_cached": 0,
1748 "context_window": 128000,
1749 "default_max_tokens": 8000,
1750 "can_reason": true,
1751 "reasoning_levels": [
1752 "low",
1753 "medium",
1754 "high"
1755 ],
1756 "default_reasoning_effort": "medium",
1757 "supports_attachments": false,
1758 "options": {}
1759 },
1760 {
1761 "id": "inception/mercury-2",
1762 "name": "Mercury 2",
1763 "cost_per_1m_in": 0.25,
1764 "cost_per_1m_out": 0.75,
1765 "cost_per_1m_in_cached": 0.025,
1766 "cost_per_1m_out_cached": 0,
1767 "context_window": 128000,
1768 "default_max_tokens": 8000,
1769 "can_reason": true,
1770 "reasoning_levels": [
1771 "low",
1772 "medium",
1773 "high"
1774 ],
1775 "default_reasoning_effort": "medium",
1776 "supports_attachments": false,
1777 "options": {}
1778 },
1779 {
1780 "id": "inception/mercury-coder-small",
1781 "name": "Mercury Coder Small Beta",
1782 "cost_per_1m_in": 0.25,
1783 "cost_per_1m_out": 1,
1784 "cost_per_1m_in_cached": 0,
1785 "cost_per_1m_out_cached": 0,
1786 "context_window": 32000,
1787 "default_max_tokens": 8000,
1788 "can_reason": false,
1789 "supports_attachments": false,
1790 "options": {}
1791 },
1792 {
1793 "id": "xiaomi/mimo-v2-flash",
1794 "name": "MiMo V2 Flash",
1795 "cost_per_1m_in": 0.1,
1796 "cost_per_1m_out": 0.3,
1797 "cost_per_1m_in_cached": 0.02,
1798 "cost_per_1m_out_cached": 0,
1799 "context_window": 262144,
1800 "default_max_tokens": 8000,
1801 "can_reason": true,
1802 "reasoning_levels": [
1803 "low",
1804 "medium",
1805 "high"
1806 ],
1807 "default_reasoning_effort": "medium",
1808 "supports_attachments": false,
1809 "options": {}
1810 },
1811 {
1812 "id": "xiaomi/mimo-v2-pro",
1813 "name": "MiMo V2 Pro",
1814 "cost_per_1m_in": 1,
1815 "cost_per_1m_out": 3,
1816 "cost_per_1m_in_cached": 0.2,
1817 "cost_per_1m_out_cached": 0,
1818 "context_window": 1000000,
1819 "default_max_tokens": 8000,
1820 "can_reason": true,
1821 "reasoning_levels": [
1822 "low",
1823 "medium",
1824 "high"
1825 ],
1826 "default_reasoning_effort": "medium",
1827 "supports_attachments": false,
1828 "options": {}
1829 },
1830 {
1831 "id": "minimax/minimax-m2",
1832 "name": "MiniMax M2",
1833 "cost_per_1m_in": 0.3,
1834 "cost_per_1m_out": 1.2,
1835 "cost_per_1m_in_cached": 0.03,
1836 "cost_per_1m_out_cached": 0.375,
1837 "context_window": 205000,
1838 "default_max_tokens": 8000,
1839 "can_reason": true,
1840 "reasoning_levels": [
1841 "low",
1842 "medium",
1843 "high"
1844 ],
1845 "default_reasoning_effort": "medium",
1846 "supports_attachments": false,
1847 "options": {}
1848 },
1849 {
1850 "id": "minimax/minimax-m2.1",
1851 "name": "MiniMax M2.1",
1852 "cost_per_1m_in": 0.3,
1853 "cost_per_1m_out": 1.2,
1854 "cost_per_1m_in_cached": 0.03,
1855 "cost_per_1m_out_cached": 0.375,
1856 "context_window": 204800,
1857 "default_max_tokens": 8000,
1858 "can_reason": true,
1859 "reasoning_levels": [
1860 "low",
1861 "medium",
1862 "high"
1863 ],
1864 "default_reasoning_effort": "medium",
1865 "supports_attachments": false,
1866 "options": {}
1867 },
1868 {
1869 "id": "minimax/minimax-m2.1-lightning",
1870 "name": "MiniMax M2.1 Lightning",
1871 "cost_per_1m_in": 0.3,
1872 "cost_per_1m_out": 2.4,
1873 "cost_per_1m_in_cached": 0.03,
1874 "cost_per_1m_out_cached": 0.375,
1875 "context_window": 204800,
1876 "default_max_tokens": 8000,
1877 "can_reason": true,
1878 "reasoning_levels": [
1879 "low",
1880 "medium",
1881 "high"
1882 ],
1883 "default_reasoning_effort": "medium",
1884 "supports_attachments": false,
1885 "options": {}
1886 },
1887 {
1888 "id": "minimax/minimax-m2.5",
1889 "name": "MiniMax M2.5",
1890 "cost_per_1m_in": 0.3,
1891 "cost_per_1m_out": 1.2,
1892 "cost_per_1m_in_cached": 0.03,
1893 "cost_per_1m_out_cached": 0.375,
1894 "context_window": 204800,
1895 "default_max_tokens": 8000,
1896 "can_reason": true,
1897 "reasoning_levels": [
1898 "low",
1899 "medium",
1900 "high"
1901 ],
1902 "default_reasoning_effort": "medium",
1903 "supports_attachments": false,
1904 "options": {}
1905 },
1906 {
1907 "id": "minimax/minimax-m2.5-highspeed",
1908 "name": "MiniMax M2.5 High Speed",
1909 "cost_per_1m_in": 0.6,
1910 "cost_per_1m_out": 2.4,
1911 "cost_per_1m_in_cached": 0.03,
1912 "cost_per_1m_out_cached": 0.375,
1913 "context_window": 204800,
1914 "default_max_tokens": 8000,
1915 "can_reason": true,
1916 "reasoning_levels": [
1917 "low",
1918 "medium",
1919 "high"
1920 ],
1921 "default_reasoning_effort": "medium",
1922 "supports_attachments": false,
1923 "options": {}
1924 },
1925 {
1926 "id": "minimax/minimax-m2.7-highspeed",
1927 "name": "MiniMax M2.7 High Speed",
1928 "cost_per_1m_in": 0.6,
1929 "cost_per_1m_out": 2.4,
1930 "cost_per_1m_in_cached": 0.06,
1931 "cost_per_1m_out_cached": 0.375,
1932 "context_window": 204800,
1933 "default_max_tokens": 8000,
1934 "can_reason": true,
1935 "reasoning_levels": [
1936 "low",
1937 "medium",
1938 "high"
1939 ],
1940 "default_reasoning_effort": "medium",
1941 "supports_attachments": true,
1942 "options": {}
1943 },
1944 {
1945 "id": "minimax/minimax-m2.7",
      "name": "MiniMax M2.7",
1947 "cost_per_1m_in": 0.3,
1948 "cost_per_1m_out": 1.2,
1949 "cost_per_1m_in_cached": 0.06,
1950 "cost_per_1m_out_cached": 0.375,
1951 "context_window": 204800,
1952 "default_max_tokens": 8000,
1953 "can_reason": true,
1954 "reasoning_levels": [
1955 "low",
1956 "medium",
1957 "high"
1958 ],
1959 "default_reasoning_effort": "medium",
1960 "supports_attachments": true,
1961 "options": {}
1962 },
1963 {
1964 "id": "mistral/ministral-3b",
1965 "name": "Ministral 3B",
1966 "cost_per_1m_in": 0.1,
1967 "cost_per_1m_out": 0.1,
1968 "cost_per_1m_in_cached": 0,
1969 "cost_per_1m_out_cached": 0,
1970 "context_window": 128000,
1971 "default_max_tokens": 4000,
1972 "can_reason": false,
1973 "supports_attachments": false,
1974 "options": {}
1975 },
1976 {
1977 "id": "mistral/ministral-8b",
1978 "name": "Ministral 8B",
1979 "cost_per_1m_in": 0.15,
1980 "cost_per_1m_out": 0.15,
1981 "cost_per_1m_in_cached": 0,
1982 "cost_per_1m_out_cached": 0,
1983 "context_window": 128000,
1984 "default_max_tokens": 4000,
1985 "can_reason": false,
1986 "supports_attachments": false,
1987 "options": {}
1988 },
1989 {
1990 "id": "mistral/codestral",
1991 "name": "Mistral Codestral",
1992 "cost_per_1m_in": 0.3,
1993 "cost_per_1m_out": 0.9,
1994 "cost_per_1m_in_cached": 0,
1995 "cost_per_1m_out_cached": 0,
1996 "context_window": 128000,
1997 "default_max_tokens": 4000,
1998 "can_reason": false,
1999 "supports_attachments": false,
2000 "options": {}
2001 },
2002 {
2003 "id": "mistral/mistral-medium",
2004 "name": "Mistral Medium 3.1",
2005 "cost_per_1m_in": 0.4,
2006 "cost_per_1m_out": 2,
2007 "cost_per_1m_in_cached": 0,
2008 "cost_per_1m_out_cached": 0,
2009 "context_window": 128000,
2010 "default_max_tokens": 8000,
2011 "can_reason": false,
2012 "supports_attachments": true,
2013 "options": {}
2014 },
2015 {
2016 "id": "mistral/mistral-small",
2017 "name": "Mistral Small",
2018 "cost_per_1m_in": 0.1,
2019 "cost_per_1m_out": 0.3,
2020 "cost_per_1m_in_cached": 0,
2021 "cost_per_1m_out_cached": 0,
2022 "context_window": 32000,
2023 "default_max_tokens": 4000,
2024 "can_reason": false,
2025 "supports_attachments": true,
2026 "options": {}
2027 },
2028 {
2029 "id": "nvidia/nemotron-nano-12b-v2-vl",
2030 "name": "Nvidia Nemotron Nano 12B V2 VL",
2031 "cost_per_1m_in": 0.2,
2032 "cost_per_1m_out": 0.6,
2033 "cost_per_1m_in_cached": 0,
2034 "cost_per_1m_out_cached": 0,
2035 "context_window": 131072,
2036 "default_max_tokens": 8000,
2037 "can_reason": true,
2038 "reasoning_levels": [
2039 "low",
2040 "medium",
2041 "high"
2042 ],
2043 "default_reasoning_effort": "medium",
2044 "supports_attachments": true,
2045 "options": {}
2046 },
2047 {
2048 "id": "nvidia/nemotron-nano-9b-v2",
2049 "name": "Nvidia Nemotron Nano 9B V2",
2050 "cost_per_1m_in": 0.06,
2051 "cost_per_1m_out": 0.23,
2052 "cost_per_1m_in_cached": 0,
2053 "cost_per_1m_out_cached": 0,
2054 "context_window": 131072,
2055 "default_max_tokens": 8000,
2056 "can_reason": true,
2057 "reasoning_levels": [
2058 "low",
2059 "medium",
2060 "high"
2061 ],
2062 "default_reasoning_effort": "medium",
2063 "supports_attachments": false,
2064 "options": {}
2065 },
2066 {
2067 "id": "mistral/pixtral-12b",
2068 "name": "Pixtral 12B 2409",
2069 "cost_per_1m_in": 0.15,
2070 "cost_per_1m_out": 0.15,
2071 "cost_per_1m_in_cached": 0,
2072 "cost_per_1m_out_cached": 0,
2073 "context_window": 128000,
2074 "default_max_tokens": 4000,
2075 "can_reason": false,
2076 "supports_attachments": true,
2077 "options": {}
2078 },
2079 {
2080 "id": "mistral/pixtral-large",
2081 "name": "Pixtral Large",
2082 "cost_per_1m_in": 2,
2083 "cost_per_1m_out": 6,
2084 "cost_per_1m_in_cached": 0,
2085 "cost_per_1m_out_cached": 0,
2086 "context_window": 128000,
2087 "default_max_tokens": 4000,
2088 "can_reason": false,
2089 "supports_attachments": true,
2090 "options": {}
2091 },
2092 {
2093 "id": "alibaba/qwen-3-32b",
2094 "name": "Qwen 3 32B",
2095 "cost_per_1m_in": 0.29,
2096 "cost_per_1m_out": 0.59,
2097 "cost_per_1m_in_cached": 0.145,
2098 "cost_per_1m_out_cached": 0,
2099 "context_window": 131072,
2100 "default_max_tokens": 8000,
2101 "can_reason": true,
2102 "reasoning_levels": [
2103 "low",
2104 "medium",
2105 "high"
2106 ],
2107 "default_reasoning_effort": "medium",
2108 "supports_attachments": false,
2109 "options": {}
2110 },
2111 {
2112 "id": "alibaba/qwen3-coder-30b-a3b",
2113 "name": "Qwen 3 Coder 30B A3B Instruct",
2114 "cost_per_1m_in": 0.15,
2115 "cost_per_1m_out": 0.6,
2116 "cost_per_1m_in_cached": 0,
2117 "cost_per_1m_out_cached": 0,
2118 "context_window": 262144,
2119 "default_max_tokens": 8000,
2120 "can_reason": true,
2121 "reasoning_levels": [
2122 "low",
2123 "medium",
2124 "high"
2125 ],
2126 "default_reasoning_effort": "medium",
2127 "supports_attachments": false,
2128 "options": {}
2129 },
2130 {
2131 "id": "alibaba/qwen3-max-thinking",
2132 "name": "Qwen 3 Max Thinking",
2133 "cost_per_1m_in": 1.2,
2134 "cost_per_1m_out": 6,
2135 "cost_per_1m_in_cached": 0.24,
2136 "cost_per_1m_out_cached": 0,
2137 "context_window": 256000,
2138 "default_max_tokens": 8000,
2139 "can_reason": true,
2140 "reasoning_levels": [
2141 "low",
2142 "medium",
2143 "high"
2144 ],
2145 "default_reasoning_effort": "medium",
2146 "supports_attachments": false,
2147 "options": {}
2148 },
2149 {
2150 "id": "alibaba/qwen3.5-flash",
2151 "name": "Qwen 3.5 Flash",
2152 "cost_per_1m_in": 0.1,
2153 "cost_per_1m_out": 0.4,
2154 "cost_per_1m_in_cached": 0.001,
2155 "cost_per_1m_out_cached": 0.125,
2156 "context_window": 1000000,
2157 "default_max_tokens": 8000,
2158 "can_reason": true,
2159 "reasoning_levels": [
2160 "low",
2161 "medium",
2162 "high"
2163 ],
2164 "default_reasoning_effort": "medium",
2165 "supports_attachments": true,
2166 "options": {}
2167 },
2168 {
2169 "id": "alibaba/qwen3.5-plus",
2170 "name": "Qwen 3.5 Plus",
2171 "cost_per_1m_in": 0.4,
2172 "cost_per_1m_out": 2.4,
2173 "cost_per_1m_in_cached": 0.04,
2174 "cost_per_1m_out_cached": 0.5,
2175 "context_window": 1000000,
2176 "default_max_tokens": 8000,
2177 "can_reason": true,
2178 "reasoning_levels": [
2179 "low",
2180 "medium",
2181 "high"
2182 ],
2183 "default_reasoning_effort": "medium",
2184 "supports_attachments": true,
2185 "options": {}
2186 },
2187 {
2188 "id": "alibaba/qwen3-235b-a22b-thinking",
2189 "name": "Qwen3 235B A22B Thinking 2507",
2190 "cost_per_1m_in": 0.23,
2191 "cost_per_1m_out": 2.3,
2192 "cost_per_1m_in_cached": 0.2,
2193 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
2195 "default_max_tokens": 8000,
2196 "can_reason": true,
2197 "reasoning_levels": [
2198 "low",
2199 "medium",
2200 "high"
2201 ],
2202 "default_reasoning_effort": "medium",
2203 "supports_attachments": true,
2204 "options": {}
2205 },
2206 {
2207 "id": "alibaba/qwen3-coder",
2208 "name": "Qwen3 Coder 480B A35B Instruct",
2209 "cost_per_1m_in": 0.4,
2210 "cost_per_1m_out": 1.6,
2211 "cost_per_1m_in_cached": 0.022,
2212 "cost_per_1m_out_cached": 0,
2213 "context_window": 262144,
2214 "default_max_tokens": 8000,
2215 "can_reason": false,
2216 "supports_attachments": false,
2217 "options": {}
2218 },
2219 {
2220 "id": "alibaba/qwen3-coder-next",
2221 "name": "Qwen3 Coder Next",
2222 "cost_per_1m_in": 0.5,
2223 "cost_per_1m_out": 1.2,
2224 "cost_per_1m_in_cached": 0,
2225 "cost_per_1m_out_cached": 0,
2226 "context_window": 256000,
2227 "default_max_tokens": 8000,
2228 "can_reason": false,
2229 "supports_attachments": false,
2230 "options": {}
2231 },
2232 {
2233 "id": "alibaba/qwen3-coder-plus",
2234 "name": "Qwen3 Coder Plus",
2235 "cost_per_1m_in": 1,
2236 "cost_per_1m_out": 5,
2237 "cost_per_1m_in_cached": 0.2,
2238 "cost_per_1m_out_cached": 0,
2239 "context_window": 1000000,
2240 "default_max_tokens": 8000,
2241 "can_reason": false,
2242 "supports_attachments": false,
2243 "options": {}
2244 },
2245 {
2246 "id": "alibaba/qwen3-max",
2247 "name": "Qwen3 Max",
2248 "cost_per_1m_in": 1.2,
2249 "cost_per_1m_out": 6,
2250 "cost_per_1m_in_cached": 0.24,
2251 "cost_per_1m_out_cached": 0,
2252 "context_window": 262144,
2253 "default_max_tokens": 8000,
2254 "can_reason": false,
2255 "supports_attachments": false,
2256 "options": {}
2257 },
2258 {
2259 "id": "alibaba/qwen3-max-preview",
2260 "name": "Qwen3 Max Preview",
2261 "cost_per_1m_in": 1.2,
2262 "cost_per_1m_out": 6,
2263 "cost_per_1m_in_cached": 0.24,
2264 "cost_per_1m_out_cached": 0,
2265 "context_window": 262144,
2266 "default_max_tokens": 8000,
2267 "can_reason": false,
2268 "supports_attachments": false,
2269 "options": {}
2270 },
2271 {
2272 "id": "alibaba/qwen3-vl-thinking",
2273 "name": "Qwen3 VL 235B A22B Thinking",
2274 "cost_per_1m_in": 0.22,
2275 "cost_per_1m_out": 0.88,
2276 "cost_per_1m_in_cached": 0,
2277 "cost_per_1m_out_cached": 0,
2278 "context_window": 256000,
2279 "default_max_tokens": 8000,
2280 "can_reason": true,
2281 "reasoning_levels": [
2282 "low",
2283 "medium",
2284 "high"
2285 ],
2286 "default_reasoning_effort": "medium",
2287 "supports_attachments": true,
2288 "options": {}
2289 },
2290 {
2291 "id": "alibaba/qwen-3-14b",
2292 "name": "Qwen3-14B",
2293 "cost_per_1m_in": 0.12,
2294 "cost_per_1m_out": 0.24,
2295 "cost_per_1m_in_cached": 0,
2296 "cost_per_1m_out_cached": 0,
2297 "context_window": 40960,
2298 "default_max_tokens": 8000,
2299 "can_reason": true,
2300 "reasoning_levels": [
2301 "low",
2302 "medium",
2303 "high"
2304 ],
2305 "default_reasoning_effort": "medium",
2306 "supports_attachments": false,
2307 "options": {}
2308 },
2309 {
2310 "id": "alibaba/qwen-3-235b",
2311 "name": "Qwen3-235B-A22B",
2312 "cost_per_1m_in": 0.071,
2313 "cost_per_1m_out": 0.463,
2314 "cost_per_1m_in_cached": 0,
2315 "cost_per_1m_out_cached": 0,
2316 "context_window": 40960,
2317 "default_max_tokens": 8000,
2318 "can_reason": false,
2319 "supports_attachments": false,
2320 "options": {}
2321 },
2322 {
2323 "id": "alibaba/qwen-3-30b",
2324 "name": "Qwen3-30B-A3B",
2325 "cost_per_1m_in": 0.08,
2326 "cost_per_1m_out": 0.29,
2327 "cost_per_1m_in_cached": 0,
2328 "cost_per_1m_out_cached": 0,
2329 "context_window": 40960,
2330 "default_max_tokens": 8000,
2331 "can_reason": true,
2332 "reasoning_levels": [
2333 "low",
2334 "medium",
2335 "high"
2336 ],
2337 "default_reasoning_effort": "medium",
2338 "supports_attachments": false,
2339 "options": {}
2340 },
2341 {
2342 "id": "bytedance/seed-1.6",
2343 "name": "Seed 1.6",
2344 "cost_per_1m_in": 0.25,
2345 "cost_per_1m_out": 2,
2346 "cost_per_1m_in_cached": 0.05,
2347 "cost_per_1m_out_cached": 0,
2348 "context_window": 256000,
2349 "default_max_tokens": 8000,
2350 "can_reason": true,
2351 "reasoning_levels": [
2352 "low",
2353 "medium",
2354 "high"
2355 ],
2356 "default_reasoning_effort": "medium",
2357 "supports_attachments": false,
2358 "options": {}
2359 },
2360 {
2361 "id": "perplexity/sonar",
2362 "name": "Sonar",
2363 "cost_per_1m_in": 0,
2364 "cost_per_1m_out": 0,
2365 "cost_per_1m_in_cached": 0,
2366 "cost_per_1m_out_cached": 0,
2367 "context_window": 127000,
2368 "default_max_tokens": 8000,
2369 "can_reason": false,
2370 "supports_attachments": true,
2371 "options": {}
2372 },
2373 {
2374 "id": "perplexity/sonar-pro",
2375 "name": "Sonar Pro",
2376 "cost_per_1m_in": 0,
2377 "cost_per_1m_out": 0,
2378 "cost_per_1m_in_cached": 0,
2379 "cost_per_1m_out_cached": 0,
2380 "context_window": 200000,
2381 "default_max_tokens": 8000,
2382 "can_reason": false,
2383 "supports_attachments": true,
2384 "options": {}
2385 },
2386 {
2387 "id": "arcee-ai/trinity-large-preview",
2388 "name": "Trinity Large Preview",
2389 "cost_per_1m_in": 0.25,
2390 "cost_per_1m_out": 1,
2391 "cost_per_1m_in_cached": 0,
2392 "cost_per_1m_out_cached": 0,
2393 "context_window": 131000,
2394 "default_max_tokens": 8000,
2395 "can_reason": false,
2396 "supports_attachments": false,
2397 "options": {}
2398 },
2399 {
2400 "id": "openai/gpt-oss-20b",
2401 "name": "gpt-oss-20b",
2402 "cost_per_1m_in": 0.07,
2403 "cost_per_1m_out": 0.3,
2404 "cost_per_1m_in_cached": 0,
2405 "cost_per_1m_out_cached": 0,
2406 "context_window": 128000,
2407 "default_max_tokens": 8000,
2408 "can_reason": true,
2409 "reasoning_levels": [
2410 "low",
2411 "medium",
2412 "high"
2413 ],
2414 "default_reasoning_effort": "medium",
2415 "supports_attachments": false,
2416 "options": {}
2417 },
2418 {
2419 "id": "openai/gpt-oss-safeguard-20b",
2420 "name": "gpt-oss-safeguard-20b",
2421 "cost_per_1m_in": 0.075,
2422 "cost_per_1m_out": 0.3,
2423 "cost_per_1m_in_cached": 0.037,
2424 "cost_per_1m_out_cached": 0,
2425 "context_window": 131072,
2426 "default_max_tokens": 8000,
2427 "can_reason": true,
2428 "reasoning_levels": [
2429 "low",
2430 "medium",
2431 "high"
2432 ],
2433 "default_reasoning_effort": "medium",
2434 "supports_attachments": false,
2435 "options": {}
2436 },
2437 {
2438 "id": "openai/o1",
2439 "name": "o1",
2440 "cost_per_1m_in": 15,
2441 "cost_per_1m_out": 60,
2442 "cost_per_1m_in_cached": 7.5,
2443 "cost_per_1m_out_cached": 0,
2444 "context_window": 200000,
2445 "default_max_tokens": 8000,
2446 "can_reason": true,
2447 "reasoning_levels": [
2448 "low",
2449 "medium",
2450 "high"
2451 ],
2452 "default_reasoning_effort": "medium",
2453 "supports_attachments": true,
2454 "options": {}
2455 },
2456 {
2457 "id": "openai/o3",
2458 "name": "o3",
2459 "cost_per_1m_in": 2,
2460 "cost_per_1m_out": 8,
2461 "cost_per_1m_in_cached": 0.5,
2462 "cost_per_1m_out_cached": 0,
2463 "context_window": 200000,
2464 "default_max_tokens": 8000,
2465 "can_reason": true,
2466 "reasoning_levels": [
2467 "low",
2468 "medium",
2469 "high"
2470 ],
2471 "default_reasoning_effort": "medium",
2472 "supports_attachments": true,
2473 "options": {}
2474 },
2475 {
2476 "id": "openai/o3-pro",
2477 "name": "o3 Pro",
2478 "cost_per_1m_in": 20,
2479 "cost_per_1m_out": 80,
2480 "cost_per_1m_in_cached": 0,
2481 "cost_per_1m_out_cached": 0,
2482 "context_window": 200000,
2483 "default_max_tokens": 8000,
2484 "can_reason": true,
2485 "reasoning_levels": [
2486 "low",
2487 "medium",
2488 "high"
2489 ],
2490 "default_reasoning_effort": "medium",
2491 "supports_attachments": true,
2492 "options": {}
2493 },
2494 {
2495 "id": "openai/o3-deep-research",
2496 "name": "o3-deep-research",
2497 "cost_per_1m_in": 10,
2498 "cost_per_1m_out": 40,
2499 "cost_per_1m_in_cached": 2.5,
2500 "cost_per_1m_out_cached": 0,
2501 "context_window": 200000,
2502 "default_max_tokens": 8000,
2503 "can_reason": true,
2504 "reasoning_levels": [
2505 "low",
2506 "medium",
2507 "high"
2508 ],
2509 "default_reasoning_effort": "medium",
2510 "supports_attachments": true,
2511 "options": {}
2512 },
2513 {
2514 "id": "openai/o3-mini",
2515 "name": "o3-mini",
2516 "cost_per_1m_in": 1.1,
2517 "cost_per_1m_out": 4.4,
2518 "cost_per_1m_in_cached": 0.55,
2519 "cost_per_1m_out_cached": 0,
2520 "context_window": 200000,
2521 "default_max_tokens": 8000,
2522 "can_reason": true,
2523 "reasoning_levels": [
2524 "low",
2525 "medium",
2526 "high"
2527 ],
2528 "default_reasoning_effort": "medium",
2529 "supports_attachments": false,
2530 "options": {}
2531 },
2532 {
2533 "id": "openai/o4-mini",
2534 "name": "o4-mini",
2535 "cost_per_1m_in": 1.1,
2536 "cost_per_1m_out": 4.4,
2537 "cost_per_1m_in_cached": 0.275,
2538 "cost_per_1m_out_cached": 0,
2539 "context_window": 200000,
2540 "default_max_tokens": 8000,
2541 "can_reason": true,
2542 "reasoning_levels": [
2543 "low",
2544 "medium",
2545 "high"
2546 ],
2547 "default_reasoning_effort": "medium",
2548 "supports_attachments": true,
2549 "options": {}
2550 }
2551 ],
2552 "default_headers": {
2553 "HTTP-Referer": "https://charm.land",
2554 "X-Title": "Crush"
2555 }
2556}