1{
2 "name": "Vercel",
3 "id": "vercel",
4 "api_key": "$VERCEL_API_KEY",
5 "api_endpoint": "https://ai-gateway.vercel.sh/v1",
6 "type": "vercel",
7 "default_large_model_id": "anthropic/claude-sonnet-4",
8 "default_small_model_id": "anthropic/claude-haiku-4.5",
9 "models": [
10 {
11 "id": "anthropic/claude-3-haiku",
12 "name": "Claude 3 Haiku",
13 "cost_per_1m_in": 0.25,
14 "cost_per_1m_out": 1.25,
15 "cost_per_1m_in_cached": 0.03,
16 "cost_per_1m_out_cached": 0.3,
17 "context_window": 200000,
18 "default_max_tokens": 4096,
19 "can_reason": false,
20 "supports_attachments": true
21 },
22 {
23 "id": "anthropic/claude-3.5-haiku",
24 "name": "Claude 3.5 Haiku",
25 "cost_per_1m_in": 0.8,
26 "cost_per_1m_out": 4,
27 "cost_per_1m_in_cached": 0.08,
28 "cost_per_1m_out_cached": 1,
29 "context_window": 200000,
30 "default_max_tokens": 8000,
31 "can_reason": false,
32 "supports_attachments": true
33 },
34 {
35 "id": "anthropic/claude-3.7-sonnet",
36 "name": "Claude 3.7 Sonnet",
37 "cost_per_1m_in": 3,
38 "cost_per_1m_out": 15,
39 "cost_per_1m_in_cached": 0.3,
40 "cost_per_1m_out_cached": 3.75,
41 "context_window": 200000,
42 "default_max_tokens": 8000,
43 "can_reason": true,
44 "reasoning_levels": [
45 "none",
46 "minimal",
47 "low",
48 "medium",
49 "high",
50 "xhigh"
51 ],
52 "default_reasoning_effort": "medium",
53 "supports_attachments": true
54 },
55 {
56 "id": "anthropic/claude-haiku-4.5",
57 "name": "Claude Haiku 4.5",
58 "cost_per_1m_in": 1,
59 "cost_per_1m_out": 5,
60 "cost_per_1m_in_cached": 0.1,
61 "cost_per_1m_out_cached": 1.25,
62 "context_window": 200000,
63 "default_max_tokens": 8000,
64 "can_reason": true,
65 "reasoning_levels": [
66 "none",
67 "minimal",
68 "low",
69 "medium",
70 "high",
71 "xhigh"
72 ],
73 "default_reasoning_effort": "medium",
74 "supports_attachments": true
75 },
76 {
77 "id": "anthropic/claude-opus-4",
78 "name": "Claude Opus 4",
79 "cost_per_1m_in": 15,
80 "cost_per_1m_out": 75,
81 "cost_per_1m_in_cached": 1.5,
82 "cost_per_1m_out_cached": 18.75,
83 "context_window": 200000,
84 "default_max_tokens": 8000,
85 "can_reason": true,
86 "reasoning_levels": [
87 "none",
88 "minimal",
89 "low",
90 "medium",
91 "high",
92 "xhigh"
93 ],
94 "default_reasoning_effort": "medium",
95 "supports_attachments": true
96 },
97 {
98 "id": "anthropic/claude-opus-4.1",
99 "name": "Claude Opus 4.1",
100 "cost_per_1m_in": 15,
101 "cost_per_1m_out": 75,
102 "cost_per_1m_in_cached": 1.5,
103 "cost_per_1m_out_cached": 18.75,
104 "context_window": 200000,
105 "default_max_tokens": 8000,
106 "can_reason": true,
107 "reasoning_levels": [
108 "none",
109 "minimal",
110 "low",
111 "medium",
112 "high",
113 "xhigh"
114 ],
115 "default_reasoning_effort": "medium",
116 "supports_attachments": true
117 },
118 {
119 "id": "anthropic/claude-opus-4.5",
120 "name": "Claude Opus 4.5",
121 "cost_per_1m_in": 5,
122 "cost_per_1m_out": 25,
123 "cost_per_1m_in_cached": 0.5,
124 "cost_per_1m_out_cached": 6.25,
125 "context_window": 200000,
126 "default_max_tokens": 8000,
127 "can_reason": true,
128 "reasoning_levels": [
129 "none",
130 "minimal",
131 "low",
132 "medium",
133 "high",
134 "xhigh"
135 ],
136 "default_reasoning_effort": "medium",
137 "supports_attachments": true
138 },
139 {
140 "id": "anthropic/claude-opus-4.6",
141 "name": "Claude Opus 4.6",
142 "cost_per_1m_in": 5,
143 "cost_per_1m_out": 25,
144 "cost_per_1m_in_cached": 0.5,
145 "cost_per_1m_out_cached": 6.25,
146 "context_window": 1000000,
147 "default_max_tokens": 8000,
148 "can_reason": true,
149 "reasoning_levels": [
150 "none",
151 "minimal",
152 "low",
153 "medium",
154 "high",
155 "xhigh"
156 ],
157 "default_reasoning_effort": "medium",
158 "supports_attachments": true
159 },
160 {
161 "id": "anthropic/claude-opus-4.7",
162 "name": "Claude Opus 4.7",
163 "cost_per_1m_in": 5,
164 "cost_per_1m_out": 25,
165 "cost_per_1m_in_cached": 0.5,
166 "cost_per_1m_out_cached": 6.25,
167 "context_window": 1000000,
168 "default_max_tokens": 8000,
169 "can_reason": true,
170 "reasoning_levels": [
171 "none",
172 "minimal",
173 "low",
174 "medium",
175 "high",
176 "xhigh"
177 ],
178 "default_reasoning_effort": "medium",
179 "supports_attachments": true
180 },
181 {
182 "id": "anthropic/claude-sonnet-4",
183 "name": "Claude Sonnet 4",
184 "cost_per_1m_in": 3,
185 "cost_per_1m_out": 15,
186 "cost_per_1m_in_cached": 0.3,
187 "cost_per_1m_out_cached": 3.75,
188 "context_window": 1000000,
189 "default_max_tokens": 8000,
190 "can_reason": true,
191 "reasoning_levels": [
192 "none",
193 "minimal",
194 "low",
195 "medium",
196 "high",
197 "xhigh"
198 ],
199 "default_reasoning_effort": "medium",
200 "supports_attachments": true
201 },
202 {
203 "id": "anthropic/claude-sonnet-4.5",
204 "name": "Claude Sonnet 4.5",
205 "cost_per_1m_in": 3,
206 "cost_per_1m_out": 15,
207 "cost_per_1m_in_cached": 0.3,
208 "cost_per_1m_out_cached": 3.75,
209 "context_window": 1000000,
210 "default_max_tokens": 8000,
211 "can_reason": true,
212 "reasoning_levels": [
213 "none",
214 "minimal",
215 "low",
216 "medium",
217 "high",
218 "xhigh"
219 ],
220 "default_reasoning_effort": "medium",
221 "supports_attachments": true
222 },
223 {
224 "id": "anthropic/claude-sonnet-4.6",
225 "name": "Claude Sonnet 4.6",
226 "cost_per_1m_in": 3,
227 "cost_per_1m_out": 15,
228 "cost_per_1m_in_cached": 0.3,
229 "cost_per_1m_out_cached": 3.75,
230 "context_window": 1000000,
231 "default_max_tokens": 8000,
232 "can_reason": true,
233 "reasoning_levels": [
234 "none",
235 "minimal",
236 "low",
237 "medium",
238 "high",
239 "xhigh"
240 ],
241 "default_reasoning_effort": "medium",
242 "supports_attachments": true
243 },
244 {
245 "id": "cohere/command-a",
246 "name": "Command A",
247 "cost_per_1m_in": 2.5,
248 "cost_per_1m_out": 10,
249 "cost_per_1m_in_cached": 0,
250 "cost_per_1m_out_cached": 0,
251 "context_window": 256000,
252 "default_max_tokens": 8000,
253 "can_reason": false,
254 "supports_attachments": false
255 },
256 {
257 "id": "deepseek/deepseek-v3",
258 "name": "DeepSeek V3 0324",
259 "cost_per_1m_in": 0.77,
260 "cost_per_1m_out": 0.77,
261 "cost_per_1m_in_cached": 0,
262 "cost_per_1m_out_cached": 0,
263 "context_window": 163840,
264 "default_max_tokens": 8000,
265 "can_reason": false,
266 "supports_attachments": false
267 },
268 {
269 "id": "deepseek/deepseek-v3.1-terminus",
270 "name": "DeepSeek V3.1 Terminus",
271 "cost_per_1m_in": 0.27,
272 "cost_per_1m_out": 1,
273 "cost_per_1m_in_cached": 0.135,
274 "cost_per_1m_out_cached": 0,
275 "context_window": 131072,
276 "default_max_tokens": 8000,
277 "can_reason": true,
278 "reasoning_levels": [
279 "low",
280 "medium",
281 "high"
282 ],
283 "default_reasoning_effort": "medium",
284 "supports_attachments": false
285 },
286 {
287 "id": "deepseek/deepseek-v3.2",
288 "name": "DeepSeek V3.2",
289 "cost_per_1m_in": 0.28,
290 "cost_per_1m_out": 0.42,
291 "cost_per_1m_in_cached": 0.028,
292 "cost_per_1m_out_cached": 0,
293 "context_window": 128000,
294 "default_max_tokens": 8000,
295 "can_reason": false,
296 "supports_attachments": false
297 },
298 {
299 "id": "deepseek/deepseek-v3.2-thinking",
300 "name": "DeepSeek V3.2 Thinking",
301 "cost_per_1m_in": 0.28,
302 "cost_per_1m_out": 0.42,
303 "cost_per_1m_in_cached": 0.028,
304 "cost_per_1m_out_cached": 0,
305 "context_window": 128000,
306 "default_max_tokens": 8000,
307 "can_reason": true,
308 "reasoning_levels": [
309 "low",
310 "medium",
311 "high"
312 ],
313 "default_reasoning_effort": "medium",
314 "supports_attachments": false
315 },
316 {
317 "id": "deepseek/deepseek-r1",
318 "name": "DeepSeek-R1",
319 "cost_per_1m_in": 1.35,
320 "cost_per_1m_out": 5.4,
321 "cost_per_1m_in_cached": 0,
322 "cost_per_1m_out_cached": 0,
323 "context_window": 128000,
324 "default_max_tokens": 8000,
325 "can_reason": true,
326 "reasoning_levels": [
327 "low",
328 "medium",
329 "high"
330 ],
331 "default_reasoning_effort": "medium",
332 "supports_attachments": false
333 },
334 {
335 "id": "deepseek/deepseek-v3.1",
336 "name": "DeepSeek-V3.1",
337 "cost_per_1m_in": 0.56,
338 "cost_per_1m_out": 1.68,
339 "cost_per_1m_in_cached": 0.28,
340 "cost_per_1m_out_cached": 0,
341 "context_window": 163840,
342 "default_max_tokens": 8000,
343 "can_reason": true,
344 "reasoning_levels": [
345 "low",
346 "medium",
347 "high"
348 ],
349 "default_reasoning_effort": "medium",
350 "supports_attachments": false
351 },
352 {
353 "id": "mistral/devstral-2",
354 "name": "Devstral 2",
355 "cost_per_1m_in": 0.4,
356 "cost_per_1m_out": 2,
357 "cost_per_1m_in_cached": 0,
358 "cost_per_1m_out_cached": 0,
359 "context_window": 256000,
360 "default_max_tokens": 8000,
361 "can_reason": false,
362 "supports_attachments": false
363 },
364 {
365 "id": "mistral/devstral-small",
366 "name": "Devstral Small 1.1",
367 "cost_per_1m_in": 0.1,
368 "cost_per_1m_out": 0.3,
369 "cost_per_1m_in_cached": 0,
370 "cost_per_1m_out_cached": 0,
371 "context_window": 128000,
372 "default_max_tokens": 8000,
373 "can_reason": false,
374 "supports_attachments": false
375 },
376 {
377 "id": "mistral/devstral-small-2",
378 "name": "Devstral Small 2",
379 "cost_per_1m_in": 0.1,
380 "cost_per_1m_out": 0.3,
381 "cost_per_1m_in_cached": 0,
382 "cost_per_1m_out_cached": 0,
383 "context_window": 256000,
384 "default_max_tokens": 8000,
385 "can_reason": false,
386 "supports_attachments": false
387 },
388 {
389 "id": "zai/glm-4.5-air",
390 "name": "GLM 4.5 Air",
391 "cost_per_1m_in": 0.2,
392 "cost_per_1m_out": 1.1,
393 "cost_per_1m_in_cached": 0.03,
394 "cost_per_1m_out_cached": 0,
395 "context_window": 128000,
396 "default_max_tokens": 8000,
397 "can_reason": true,
398 "reasoning_levels": [
399 "low",
400 "medium",
401 "high"
402 ],
403 "default_reasoning_effort": "medium",
404 "supports_attachments": false
405 },
406 {
407 "id": "zai/glm-4.5v",
408 "name": "GLM 4.5V",
409 "cost_per_1m_in": 0.6,
410 "cost_per_1m_out": 1.8,
411 "cost_per_1m_in_cached": 0.11,
412 "cost_per_1m_out_cached": 0,
413 "context_window": 66000,
414 "default_max_tokens": 8000,
415 "can_reason": false,
416 "supports_attachments": true
417 },
418 {
419 "id": "zai/glm-4.6",
420 "name": "GLM 4.6",
421 "cost_per_1m_in": 0.6,
422 "cost_per_1m_out": 2.2,
423 "cost_per_1m_in_cached": 0.11,
424 "cost_per_1m_out_cached": 0,
425 "context_window": 200000,
426 "default_max_tokens": 8000,
427 "can_reason": true,
428 "reasoning_levels": [
429 "low",
430 "medium",
431 "high"
432 ],
433 "default_reasoning_effort": "medium",
434 "supports_attachments": false
435 },
436 {
437 "id": "zai/glm-4.7",
438 "name": "GLM 4.7",
439 "cost_per_1m_in": 2.25,
440 "cost_per_1m_out": 2.75,
441 "cost_per_1m_in_cached": 2.25,
442 "cost_per_1m_out_cached": 0,
443 "context_window": 131000,
444 "default_max_tokens": 8000,
445 "can_reason": true,
446 "reasoning_levels": [
447 "low",
448 "medium",
449 "high"
450 ],
451 "default_reasoning_effort": "medium",
452 "supports_attachments": false
453 },
454 {
455 "id": "zai/glm-4.7-flash",
456 "name": "GLM 4.7 Flash",
457 "cost_per_1m_in": 0.07,
458 "cost_per_1m_out": 0.4,
459 "cost_per_1m_in_cached": 0,
460 "cost_per_1m_out_cached": 0,
461 "context_window": 200000,
462 "default_max_tokens": 8000,
463 "can_reason": true,
464 "reasoning_levels": [
465 "low",
466 "medium",
467 "high"
468 ],
469 "default_reasoning_effort": "medium",
470 "supports_attachments": false
471 },
472 {
473 "id": "zai/glm-4.7-flashx",
474 "name": "GLM 4.7 FlashX",
475 "cost_per_1m_in": 0.06,
476 "cost_per_1m_out": 0.4,
477 "cost_per_1m_in_cached": 0.01,
478 "cost_per_1m_out_cached": 0,
479 "context_window": 200000,
480 "default_max_tokens": 8000,
481 "can_reason": true,
482 "reasoning_levels": [
483 "low",
484 "medium",
485 "high"
486 ],
487 "default_reasoning_effort": "medium",
488 "supports_attachments": false
489 },
490 {
491 "id": "zai/glm-5",
492 "name": "GLM 5",
493 "cost_per_1m_in": 1,
494 "cost_per_1m_out": 3.2,
495 "cost_per_1m_in_cached": 0.2,
496 "cost_per_1m_out_cached": 0,
497 "context_window": 202800,
498 "default_max_tokens": 8000,
499 "can_reason": true,
500 "reasoning_levels": [
501 "low",
502 "medium",
503 "high"
504 ],
505 "default_reasoning_effort": "medium",
506 "supports_attachments": false
507 },
508 {
509 "id": "zai/glm-5-turbo",
510 "name": "GLM 5 Turbo",
511 "cost_per_1m_in": 1.2,
512 "cost_per_1m_out": 4,
513 "cost_per_1m_in_cached": 0.24,
514 "cost_per_1m_out_cached": 0,
515 "context_window": 202800,
516 "default_max_tokens": 8000,
517 "can_reason": true,
518 "reasoning_levels": [
519 "low",
520 "medium",
521 "high"
522 ],
523 "default_reasoning_effort": "medium",
524 "supports_attachments": false
525 },
526 {
527 "id": "zai/glm-5.1",
528 "name": "GLM 5.1",
529 "cost_per_1m_in": 1.4,
530 "cost_per_1m_out": 4.4,
531 "cost_per_1m_in_cached": 0.26,
532 "cost_per_1m_out_cached": 0,
533 "context_window": 202752,
534 "default_max_tokens": 8000,
535 "can_reason": true,
536 "reasoning_levels": [
537 "low",
538 "medium",
539 "high"
540 ],
541 "default_reasoning_effort": "medium",
542 "supports_attachments": true
543 },
544 {
545 "id": "zai/glm-5v-turbo",
546 "name": "GLM 5V Turbo",
547 "cost_per_1m_in": 1.2,
548 "cost_per_1m_out": 4,
549 "cost_per_1m_in_cached": 0.24,
550 "cost_per_1m_out_cached": 0,
551 "context_window": 200000,
552 "default_max_tokens": 8000,
553 "can_reason": true,
554 "reasoning_levels": [
555 "low",
556 "medium",
557 "high"
558 ],
559 "default_reasoning_effort": "medium",
560 "supports_attachments": true
561 },
562 {
563 "id": "zai/glm-4.5",
564 "name": "GLM-4.5",
565 "cost_per_1m_in": 0.6,
566 "cost_per_1m_out": 2.2,
567 "cost_per_1m_in_cached": 0.11,
568 "cost_per_1m_out_cached": 0,
569 "context_window": 128000,
570 "default_max_tokens": 8000,
571 "can_reason": true,
572 "reasoning_levels": [
573 "low",
574 "medium",
575 "high"
576 ],
577 "default_reasoning_effort": "medium",
578 "supports_attachments": false
579 },
580 {
581 "id": "zai/glm-4.6v",
582 "name": "GLM-4.6V",
583 "cost_per_1m_in": 0.3,
584 "cost_per_1m_out": 0.9,
585 "cost_per_1m_in_cached": 0.05,
586 "cost_per_1m_out_cached": 0,
587 "context_window": 128000,
588 "default_max_tokens": 8000,
589 "can_reason": true,
590 "reasoning_levels": [
591 "low",
592 "medium",
593 "high"
594 ],
595 "default_reasoning_effort": "medium",
596 "supports_attachments": true
597 },
598 {
599 "id": "zai/glm-4.6v-flash",
600 "name": "GLM-4.6V-Flash",
601 "cost_per_1m_in": 0,
602 "cost_per_1m_out": 0,
603 "cost_per_1m_in_cached": 0,
604 "cost_per_1m_out_cached": 0,
605 "context_window": 128000,
606 "default_max_tokens": 8000,
607 "can_reason": true,
608 "reasoning_levels": [
609 "low",
610 "medium",
611 "high"
612 ],
613 "default_reasoning_effort": "medium",
614 "supports_attachments": true
615 },
616 {
617 "id": "openai/gpt-5-chat",
618 "name": "GPT 5 Chat",
619 "cost_per_1m_in": 1.25,
620 "cost_per_1m_out": 10,
621 "cost_per_1m_in_cached": 0.125,
622 "cost_per_1m_out_cached": 0,
623 "context_window": 128000,
624 "default_max_tokens": 8000,
625 "can_reason": true,
626 "reasoning_levels": [
627 "low",
628 "medium",
629 "high"
630 ],
631 "default_reasoning_effort": "medium",
632 "supports_attachments": true
633 },
634 {
635 "id": "openai/gpt-5.1-codex-max",
636 "name": "GPT 5.1 Codex Max",
637 "cost_per_1m_in": 1.25,
638 "cost_per_1m_out": 10,
639 "cost_per_1m_in_cached": 0.125,
640 "cost_per_1m_out_cached": 0,
641 "context_window": 400000,
642 "default_max_tokens": 8000,
643 "can_reason": true,
644 "reasoning_levels": [
645 "low",
646 "medium",
647 "high"
648 ],
649 "default_reasoning_effort": "medium",
650 "supports_attachments": true
651 },
652 {
653 "id": "openai/gpt-5.1-codex-mini",
654 "name": "GPT 5.1 Codex Mini",
655 "cost_per_1m_in": 0.25,
656 "cost_per_1m_out": 2,
657 "cost_per_1m_in_cached": 0.025,
658 "cost_per_1m_out_cached": 0,
659 "context_window": 400000,
660 "default_max_tokens": 8000,
661 "can_reason": true,
662 "reasoning_levels": [
663 "low",
664 "medium",
665 "high"
666 ],
667 "default_reasoning_effort": "medium",
668 "supports_attachments": true
669 },
670 {
671 "id": "openai/gpt-5.1-thinking",
672 "name": "GPT 5.1 Thinking",
673 "cost_per_1m_in": 1.25,
674 "cost_per_1m_out": 10,
675 "cost_per_1m_in_cached": 0.125,
676 "cost_per_1m_out_cached": 0,
677 "context_window": 400000,
678 "default_max_tokens": 8000,
679 "can_reason": true,
680 "reasoning_levels": [
681 "low",
682 "medium",
683 "high"
684 ],
685 "default_reasoning_effort": "medium",
686 "supports_attachments": true
687 },
688 {
689 "id": "openai/gpt-5.2",
690 "name": "GPT 5.2",
691 "cost_per_1m_in": 1.75,
692 "cost_per_1m_out": 14,
693 "cost_per_1m_in_cached": 0.175,
694 "cost_per_1m_out_cached": 0,
695 "context_window": 400000,
696 "default_max_tokens": 8000,
697 "can_reason": true,
698 "reasoning_levels": [
699 "low",
700 "medium",
701 "high"
702 ],
703 "default_reasoning_effort": "medium",
704 "supports_attachments": true
705 },
706 {
707 "id": "openai/gpt-5.2-pro",
      "name": "GPT 5.2 Pro",
709 "cost_per_1m_in": 21,
710 "cost_per_1m_out": 168,
711 "cost_per_1m_in_cached": 0,
712 "cost_per_1m_out_cached": 0,
713 "context_window": 400000,
714 "default_max_tokens": 8000,
715 "can_reason": true,
716 "reasoning_levels": [
717 "low",
718 "medium",
719 "high"
720 ],
721 "default_reasoning_effort": "medium",
722 "supports_attachments": true
723 },
724 {
725 "id": "openai/gpt-5.2-chat",
726 "name": "GPT 5.2 Chat",
727 "cost_per_1m_in": 1.75,
728 "cost_per_1m_out": 14,
729 "cost_per_1m_in_cached": 0.175,
730 "cost_per_1m_out_cached": 0,
731 "context_window": 128000,
732 "default_max_tokens": 8000,
733 "can_reason": true,
734 "reasoning_levels": [
735 "low",
736 "medium",
737 "high"
738 ],
739 "default_reasoning_effort": "medium",
740 "supports_attachments": true
741 },
742 {
743 "id": "openai/gpt-5.2-codex",
744 "name": "GPT 5.2 Codex",
745 "cost_per_1m_in": 1.75,
746 "cost_per_1m_out": 14,
747 "cost_per_1m_in_cached": 0.175,
748 "cost_per_1m_out_cached": 0,
749 "context_window": 400000,
750 "default_max_tokens": 8000,
751 "can_reason": true,
752 "reasoning_levels": [
753 "low",
754 "medium",
755 "high"
756 ],
757 "default_reasoning_effort": "medium",
758 "supports_attachments": true
759 },
760 {
761 "id": "openai/gpt-5.3-codex",
762 "name": "GPT 5.3 Codex",
763 "cost_per_1m_in": 1.75,
764 "cost_per_1m_out": 14,
765 "cost_per_1m_in_cached": 0.175,
766 "cost_per_1m_out_cached": 0,
767 "context_window": 400000,
768 "default_max_tokens": 8000,
769 "can_reason": true,
770 "reasoning_levels": [
771 "low",
772 "medium",
773 "high"
774 ],
775 "default_reasoning_effort": "medium",
776 "supports_attachments": true
777 },
778 {
779 "id": "openai/gpt-5.4",
780 "name": "GPT 5.4",
781 "cost_per_1m_in": 2.5,
782 "cost_per_1m_out": 15,
783 "cost_per_1m_in_cached": 0.25,
784 "cost_per_1m_out_cached": 0,
785 "context_window": 1050000,
786 "default_max_tokens": 8000,
787 "can_reason": true,
788 "reasoning_levels": [
789 "low",
790 "medium",
791 "high"
792 ],
793 "default_reasoning_effort": "medium",
794 "supports_attachments": true
795 },
796 {
797 "id": "openai/gpt-5.4-mini",
798 "name": "GPT 5.4 Mini",
799 "cost_per_1m_in": 0.75,
800 "cost_per_1m_out": 4.5,
801 "cost_per_1m_in_cached": 0.075,
802 "cost_per_1m_out_cached": 0,
803 "context_window": 400000,
804 "default_max_tokens": 8000,
805 "can_reason": true,
806 "reasoning_levels": [
807 "low",
808 "medium",
809 "high"
810 ],
811 "default_reasoning_effort": "medium",
812 "supports_attachments": true
813 },
814 {
815 "id": "openai/gpt-5.4-nano",
816 "name": "GPT 5.4 Nano",
817 "cost_per_1m_in": 0.2,
818 "cost_per_1m_out": 1.25,
819 "cost_per_1m_in_cached": 0.02,
820 "cost_per_1m_out_cached": 0,
821 "context_window": 400000,
822 "default_max_tokens": 8000,
823 "can_reason": true,
824 "reasoning_levels": [
825 "low",
826 "medium",
827 "high"
828 ],
829 "default_reasoning_effort": "medium",
830 "supports_attachments": true
831 },
832 {
833 "id": "openai/gpt-5.4-pro",
834 "name": "GPT 5.4 Pro",
835 "cost_per_1m_in": 30,
836 "cost_per_1m_out": 180,
837 "cost_per_1m_in_cached": 0,
838 "cost_per_1m_out_cached": 0,
839 "context_window": 1050000,
840 "default_max_tokens": 8000,
841 "can_reason": true,
842 "reasoning_levels": [
843 "low",
844 "medium",
845 "high"
846 ],
847 "default_reasoning_effort": "medium",
848 "supports_attachments": true
849 },
850 {
851 "id": "openai/gpt-oss-20b",
      "name": "GPT OSS 20B",
853 "cost_per_1m_in": 0.05,
854 "cost_per_1m_out": 0.2,
855 "cost_per_1m_in_cached": 0,
856 "cost_per_1m_out_cached": 0,
857 "context_window": 131072,
858 "default_max_tokens": 8000,
859 "can_reason": true,
860 "reasoning_levels": [
861 "low",
862 "medium",
863 "high"
864 ],
865 "default_reasoning_effort": "medium",
866 "supports_attachments": false
867 },
868 {
869 "id": "openai/gpt-oss-safeguard-20b",
870 "name": "GPT OSS Safeguard 20B",
871 "cost_per_1m_in": 0.075,
872 "cost_per_1m_out": 0.3,
873 "cost_per_1m_in_cached": 0.037,
874 "cost_per_1m_out_cached": 0,
875 "context_window": 131072,
876 "default_max_tokens": 8000,
877 "can_reason": true,
878 "reasoning_levels": [
879 "low",
880 "medium",
881 "high"
882 ],
883 "default_reasoning_effort": "medium",
884 "supports_attachments": false
885 },
886 {
887 "id": "openai/gpt-4-turbo",
888 "name": "GPT-4 Turbo",
889 "cost_per_1m_in": 10,
890 "cost_per_1m_out": 30,
891 "cost_per_1m_in_cached": 0,
892 "cost_per_1m_out_cached": 0,
893 "context_window": 128000,
894 "default_max_tokens": 4096,
895 "can_reason": false,
896 "supports_attachments": true
897 },
898 {
899 "id": "openai/gpt-4.1",
900 "name": "GPT-4.1",
901 "cost_per_1m_in": 2,
902 "cost_per_1m_out": 8,
903 "cost_per_1m_in_cached": 0.5,
904 "cost_per_1m_out_cached": 0,
905 "context_window": 1047576,
906 "default_max_tokens": 8000,
907 "can_reason": false,
908 "supports_attachments": true
909 },
910 {
911 "id": "openai/gpt-4.1-mini",
912 "name": "GPT-4.1 mini",
913 "cost_per_1m_in": 0.4,
914 "cost_per_1m_out": 1.6,
915 "cost_per_1m_in_cached": 0.1,
916 "cost_per_1m_out_cached": 0,
917 "context_window": 1047576,
918 "default_max_tokens": 8000,
919 "can_reason": false,
920 "supports_attachments": true
921 },
922 {
923 "id": "openai/gpt-4.1-nano",
924 "name": "GPT-4.1 nano",
925 "cost_per_1m_in": 0.1,
926 "cost_per_1m_out": 0.4,
927 "cost_per_1m_in_cached": 0.025,
928 "cost_per_1m_out_cached": 0,
929 "context_window": 1047576,
930 "default_max_tokens": 8000,
931 "can_reason": false,
932 "supports_attachments": true
933 },
934 {
935 "id": "openai/gpt-4o",
936 "name": "GPT-4o",
937 "cost_per_1m_in": 2.5,
938 "cost_per_1m_out": 10,
939 "cost_per_1m_in_cached": 1.25,
940 "cost_per_1m_out_cached": 0,
941 "context_window": 128000,
942 "default_max_tokens": 8000,
943 "can_reason": false,
944 "supports_attachments": true
945 },
946 {
947 "id": "openai/gpt-4o-mini",
948 "name": "GPT-4o mini",
949 "cost_per_1m_in": 0.15,
950 "cost_per_1m_out": 0.6,
951 "cost_per_1m_in_cached": 0.075,
952 "cost_per_1m_out_cached": 0,
953 "context_window": 128000,
954 "default_max_tokens": 8000,
955 "can_reason": false,
956 "supports_attachments": true
957 },
958 {
959 "id": "openai/gpt-5",
960 "name": "GPT-5",
961 "cost_per_1m_in": 1.25,
962 "cost_per_1m_out": 10,
963 "cost_per_1m_in_cached": 0.125,
964 "cost_per_1m_out_cached": 0,
965 "context_window": 400000,
966 "default_max_tokens": 8000,
967 "can_reason": true,
968 "reasoning_levels": [
969 "low",
970 "medium",
971 "high"
972 ],
973 "default_reasoning_effort": "medium",
974 "supports_attachments": true
975 },
976 {
977 "id": "openai/gpt-5-mini",
978 "name": "GPT-5 mini",
979 "cost_per_1m_in": 0.25,
980 "cost_per_1m_out": 2,
981 "cost_per_1m_in_cached": 0.025,
982 "cost_per_1m_out_cached": 0,
983 "context_window": 400000,
984 "default_max_tokens": 8000,
985 "can_reason": true,
986 "reasoning_levels": [
987 "low",
988 "medium",
989 "high"
990 ],
991 "default_reasoning_effort": "medium",
992 "supports_attachments": true
993 },
994 {
995 "id": "openai/gpt-5-nano",
996 "name": "GPT-5 nano",
997 "cost_per_1m_in": 0.05,
998 "cost_per_1m_out": 0.4,
999 "cost_per_1m_in_cached": 0.005,
1000 "cost_per_1m_out_cached": 0,
1001 "context_window": 400000,
1002 "default_max_tokens": 8000,
1003 "can_reason": true,
1004 "reasoning_levels": [
1005 "low",
1006 "medium",
1007 "high"
1008 ],
1009 "default_reasoning_effort": "medium",
1010 "supports_attachments": true
1011 },
1012 {
1013 "id": "openai/gpt-5-pro",
1014 "name": "GPT-5 pro",
1015 "cost_per_1m_in": 15,
1016 "cost_per_1m_out": 120,
1017 "cost_per_1m_in_cached": 0,
1018 "cost_per_1m_out_cached": 0,
1019 "context_window": 400000,
1020 "default_max_tokens": 8000,
1021 "can_reason": true,
1022 "reasoning_levels": [
1023 "low",
1024 "medium",
1025 "high"
1026 ],
1027 "default_reasoning_effort": "medium",
1028 "supports_attachments": true
1029 },
1030 {
1031 "id": "openai/gpt-5-codex",
1032 "name": "GPT-5-Codex",
1033 "cost_per_1m_in": 1.25,
1034 "cost_per_1m_out": 10,
1035 "cost_per_1m_in_cached": 0.125,
1036 "cost_per_1m_out_cached": 0,
1037 "context_window": 400000,
1038 "default_max_tokens": 8000,
1039 "can_reason": true,
1040 "reasoning_levels": [
1041 "low",
1042 "medium",
1043 "high"
1044 ],
1045 "default_reasoning_effort": "medium",
1046 "supports_attachments": false
1047 },
1048 {
1049 "id": "openai/gpt-5.1-instant",
1050 "name": "GPT-5.1 Instant",
1051 "cost_per_1m_in": 1.25,
1052 "cost_per_1m_out": 10,
1053 "cost_per_1m_in_cached": 0.125,
1054 "cost_per_1m_out_cached": 0,
1055 "context_window": 128000,
1056 "default_max_tokens": 8000,
1057 "can_reason": true,
1058 "reasoning_levels": [
1059 "low",
1060 "medium",
1061 "high"
1062 ],
1063 "default_reasoning_effort": "medium",
1064 "supports_attachments": true
1065 },
1066 {
1067 "id": "openai/gpt-5.1-codex",
1068 "name": "GPT-5.1-Codex",
1069 "cost_per_1m_in": 1.25,
1070 "cost_per_1m_out": 10,
1071 "cost_per_1m_in_cached": 0.125,
1072 "cost_per_1m_out_cached": 0,
1073 "context_window": 400000,
1074 "default_max_tokens": 8000,
1075 "can_reason": true,
1076 "reasoning_levels": [
1077 "low",
1078 "medium",
1079 "high"
1080 ],
1081 "default_reasoning_effort": "medium",
1082 "supports_attachments": true
1083 },
1084 {
1085 "id": "openai/gpt-5.3-chat",
1086 "name": "GPT-5.3 Chat",
1087 "cost_per_1m_in": 1.75,
1088 "cost_per_1m_out": 14,
1089 "cost_per_1m_in_cached": 0.175,
1090 "cost_per_1m_out_cached": 0,
1091 "context_window": 128000,
1092 "default_max_tokens": 8000,
1093 "can_reason": true,
1094 "reasoning_levels": [
1095 "low",
1096 "medium",
1097 "high"
1098 ],
1099 "default_reasoning_effort": "medium",
1100 "supports_attachments": true
1101 },
1102 {
1103 "id": "google/gemini-2.0-flash",
1104 "name": "Gemini 2.0 Flash",
1105 "cost_per_1m_in": 0.15,
1106 "cost_per_1m_out": 0.6,
1107 "cost_per_1m_in_cached": 0.025,
1108 "cost_per_1m_out_cached": 0,
1109 "context_window": 1048576,
1110 "default_max_tokens": 8000,
1111 "can_reason": false,
1112 "supports_attachments": true
1113 },
1114 {
1115 "id": "google/gemini-2.0-flash-lite",
1116 "name": "Gemini 2.0 Flash Lite",
1117 "cost_per_1m_in": 0.075,
1118 "cost_per_1m_out": 0.3,
1119 "cost_per_1m_in_cached": 0.02,
1120 "cost_per_1m_out_cached": 0,
1121 "context_window": 1048576,
1122 "default_max_tokens": 8000,
1123 "can_reason": false,
1124 "supports_attachments": true
1125 },
1126 {
1127 "id": "google/gemini-2.5-flash",
1128 "name": "Gemini 2.5 Flash",
1129 "cost_per_1m_in": 0.3,
1130 "cost_per_1m_out": 2.5,
1131 "cost_per_1m_in_cached": 0.03,
1132 "cost_per_1m_out_cached": 0,
1133 "context_window": 1000000,
1134 "default_max_tokens": 8000,
1135 "can_reason": true,
1136 "reasoning_levels": [
1137 "low",
1138 "medium",
1139 "high"
1140 ],
1141 "default_reasoning_effort": "medium",
1142 "supports_attachments": true
1143 },
1144 {
1145 "id": "google/gemini-2.5-flash-lite",
1146 "name": "Gemini 2.5 Flash Lite",
1147 "cost_per_1m_in": 0.1,
1148 "cost_per_1m_out": 0.4,
1149 "cost_per_1m_in_cached": 0.01,
1150 "cost_per_1m_out_cached": 0,
1151 "context_window": 1048576,
1152 "default_max_tokens": 8000,
1153 "can_reason": true,
1154 "reasoning_levels": [
1155 "low",
1156 "medium",
1157 "high"
1158 ],
1159 "default_reasoning_effort": "medium",
1160 "supports_attachments": true
1161 },
1162 {
1163 "id": "google/gemini-2.5-pro",
1164 "name": "Gemini 2.5 Pro",
1165 "cost_per_1m_in": 1.25,
1166 "cost_per_1m_out": 10,
1167 "cost_per_1m_in_cached": 0.125,
1168 "cost_per_1m_out_cached": 0,
1169 "context_window": 1048576,
1170 "default_max_tokens": 8000,
1171 "can_reason": true,
1172 "reasoning_levels": [
1173 "low",
1174 "medium",
1175 "high"
1176 ],
1177 "default_reasoning_effort": "medium",
1178 "supports_attachments": true
1179 },
1180 {
1181 "id": "google/gemini-3-flash",
1182 "name": "Gemini 3 Flash",
1183 "cost_per_1m_in": 0.5,
1184 "cost_per_1m_out": 3,
1185 "cost_per_1m_in_cached": 0.05,
1186 "cost_per_1m_out_cached": 0,
1187 "context_window": 1000000,
1188 "default_max_tokens": 8000,
1189 "can_reason": true,
1190 "reasoning_levels": [
1191 "low",
1192 "medium",
1193 "high"
1194 ],
1195 "default_reasoning_effort": "medium",
1196 "supports_attachments": true
1197 },
1198 {
1199 "id": "google/gemini-3-pro-preview",
1200 "name": "Gemini 3 Pro Preview",
1201 "cost_per_1m_in": 2,
1202 "cost_per_1m_out": 12,
1203 "cost_per_1m_in_cached": 0.2,
1204 "cost_per_1m_out_cached": 0,
1205 "context_window": 1000000,
1206 "default_max_tokens": 8000,
1207 "can_reason": true,
1208 "reasoning_levels": [
1209 "low",
1210 "medium",
1211 "high"
1212 ],
1213 "default_reasoning_effort": "medium",
1214 "supports_attachments": true
1215 },
1216 {
1217 "id": "google/gemini-3.1-flash-lite-preview",
1218 "name": "Gemini 3.1 Flash Lite Preview",
1219 "cost_per_1m_in": 0.25,
1220 "cost_per_1m_out": 1.5,
1221 "cost_per_1m_in_cached": 0.03,
1222 "cost_per_1m_out_cached": 0,
1223 "context_window": 1000000,
1224 "default_max_tokens": 8000,
1225 "can_reason": true,
1226 "reasoning_levels": [
1227 "low",
1228 "medium",
1229 "high"
1230 ],
1231 "default_reasoning_effort": "medium",
1232 "supports_attachments": true
1233 },
1234 {
1235 "id": "google/gemini-3.1-pro-preview",
1236 "name": "Gemini 3.1 Pro Preview",
1237 "cost_per_1m_in": 2,
1238 "cost_per_1m_out": 12,
1239 "cost_per_1m_in_cached": 0.2,
1240 "cost_per_1m_out_cached": 0,
1241 "context_window": 1000000,
1242 "default_max_tokens": 8000,
1243 "can_reason": true,
1244 "reasoning_levels": [
1245 "low",
1246 "medium",
1247 "high"
1248 ],
1249 "default_reasoning_effort": "medium",
1250 "supports_attachments": true
1251 },
1252 {
1253 "id": "google/gemma-4-26b-a4b-it",
1254 "name": "Gemma 4 26B A4B IT",
1255 "cost_per_1m_in": 0.13,
1256 "cost_per_1m_out": 0.4,
1257 "cost_per_1m_in_cached": 0,
1258 "cost_per_1m_out_cached": 0,
1259 "context_window": 262144,
1260 "default_max_tokens": 8000,
1261 "can_reason": false,
1262 "supports_attachments": true
1263 },
1264 {
1265 "id": "google/gemma-4-31b-it",
1266 "name": "Gemma 4 31B IT",
1267 "cost_per_1m_in": 0.14,
1268 "cost_per_1m_out": 0.4,
1269 "cost_per_1m_in_cached": 0,
1270 "cost_per_1m_out_cached": 0,
1271 "context_window": 262144,
1272 "default_max_tokens": 8000,
1273 "can_reason": false,
1274 "supports_attachments": true
1275 },
1276 {
1277 "id": "xai/grok-3",
1278 "name": "Grok 3 Beta",
1279 "cost_per_1m_in": 3,
1280 "cost_per_1m_out": 15,
1281 "cost_per_1m_in_cached": 0.75,
1282 "cost_per_1m_out_cached": 0,
1283 "context_window": 131072,
1284 "default_max_tokens": 8000,
1285 "can_reason": false,
1286 "supports_attachments": false
1287 },
1288 {
1289 "id": "xai/grok-3-fast",
1290 "name": "Grok 3 Fast Beta",
1291 "cost_per_1m_in": 5,
1292 "cost_per_1m_out": 25,
1293 "cost_per_1m_in_cached": 1.25,
1294 "cost_per_1m_out_cached": 0,
1295 "context_window": 131072,
1296 "default_max_tokens": 8000,
1297 "can_reason": false,
1298 "supports_attachments": false
1299 },
1300 {
1301 "id": "xai/grok-3-mini",
1302 "name": "Grok 3 Mini Beta",
1303 "cost_per_1m_in": 0.3,
1304 "cost_per_1m_out": 0.5,
1305 "cost_per_1m_in_cached": 0.075,
1306 "cost_per_1m_out_cached": 0,
1307 "context_window": 131072,
1308 "default_max_tokens": 8000,
1309 "can_reason": false,
1310 "supports_attachments": false
1311 },
1312 {
1313 "id": "xai/grok-3-mini-fast",
1314 "name": "Grok 3 Mini Fast Beta",
1315 "cost_per_1m_in": 0.6,
1316 "cost_per_1m_out": 4,
1317 "cost_per_1m_in_cached": 0,
1318 "cost_per_1m_out_cached": 0,
1319 "context_window": 131072,
1320 "default_max_tokens": 8000,
1321 "can_reason": false,
1322 "supports_attachments": false
1323 },
1324 {
1325 "id": "xai/grok-4",
1326 "name": "Grok 4",
1327 "cost_per_1m_in": 3,
1328 "cost_per_1m_out": 15,
1329 "cost_per_1m_in_cached": 0.75,
1330 "cost_per_1m_out_cached": 0,
1331 "context_window": 256000,
1332 "default_max_tokens": 8000,
1333 "can_reason": true,
1334 "reasoning_levels": [
1335 "low",
1336 "medium",
1337 "high"
1338 ],
1339 "default_reasoning_effort": "medium",
1340 "supports_attachments": true
1341 },
1342 {
1343 "id": "xai/grok-4-fast-non-reasoning",
1344 "name": "Grok 4 Fast Non-Reasoning",
1345 "cost_per_1m_in": 0.2,
1346 "cost_per_1m_out": 0.5,
1347 "cost_per_1m_in_cached": 0.05,
1348 "cost_per_1m_out_cached": 0,
1349 "context_window": 2000000,
1350 "default_max_tokens": 8000,
1351 "can_reason": false,
1352 "supports_attachments": true
1353 },
1354 {
1355 "id": "xai/grok-4-fast-reasoning",
1356 "name": "Grok 4 Fast Reasoning",
1357 "cost_per_1m_in": 0.2,
1358 "cost_per_1m_out": 0.5,
1359 "cost_per_1m_in_cached": 0.05,
1360 "cost_per_1m_out_cached": 0,
1361 "context_window": 2000000,
1362 "default_max_tokens": 8000,
1363 "can_reason": true,
1364 "reasoning_levels": [
1365 "low",
1366 "medium",
1367 "high"
1368 ],
1369 "default_reasoning_effort": "medium",
1370 "supports_attachments": true
1371 },
1372 {
1373 "id": "xai/grok-4.1-fast-non-reasoning",
1374 "name": "Grok 4.1 Fast Non-Reasoning",
1375 "cost_per_1m_in": 0.2,
1376 "cost_per_1m_out": 0.5,
1377 "cost_per_1m_in_cached": 0.05,
1378 "cost_per_1m_out_cached": 0,
1379 "context_window": 2000000,
1380 "default_max_tokens": 8000,
1381 "can_reason": false,
1382 "supports_attachments": true
1383 },
1384 {
1385 "id": "xai/grok-4.1-fast-reasoning",
1386 "name": "Grok 4.1 Fast Reasoning",
1387 "cost_per_1m_in": 0.2,
1388 "cost_per_1m_out": 0.5,
1389 "cost_per_1m_in_cached": 0.05,
1390 "cost_per_1m_out_cached": 0,
1391 "context_window": 2000000,
1392 "default_max_tokens": 8000,
1393 "can_reason": true,
1394 "reasoning_levels": [
1395 "low",
1396 "medium",
1397 "high"
1398 ],
1399 "default_reasoning_effort": "medium",
1400 "supports_attachments": true
1401 },
1402 {
1403 "id": "xai/grok-4.20-non-reasoning-beta",
1404 "name": "Grok 4.20 Beta Non-Reasoning",
1405 "cost_per_1m_in": 2,
1406 "cost_per_1m_out": 6,
1407 "cost_per_1m_in_cached": 0.2,
1408 "cost_per_1m_out_cached": 0,
1409 "context_window": 2000000,
1410 "default_max_tokens": 8000,
1411 "can_reason": false,
1412 "supports_attachments": true
1413 },
1414 {
1415 "id": "xai/grok-4.20-reasoning-beta",
1416 "name": "Grok 4.20 Beta Reasoning",
1417 "cost_per_1m_in": 2,
1418 "cost_per_1m_out": 6,
1419 "cost_per_1m_in_cached": 0.2,
1420 "cost_per_1m_out_cached": 0,
1421 "context_window": 2000000,
1422 "default_max_tokens": 8000,
1423 "can_reason": true,
1424 "reasoning_levels": [
1425 "low",
1426 "medium",
1427 "high"
1428 ],
1429 "default_reasoning_effort": "medium",
1430 "supports_attachments": true
1431 },
1432 {
1433 "id": "xai/grok-4.20-multi-agent-beta",
1434 "name": "Grok 4.20 Multi Agent Beta",
1435 "cost_per_1m_in": 2,
1436 "cost_per_1m_out": 6,
1437 "cost_per_1m_in_cached": 0.2,
1438 "cost_per_1m_out_cached": 0,
1439 "context_window": 2000000,
1440 "default_max_tokens": 8000,
1441 "can_reason": true,
1442 "reasoning_levels": [
1443 "low",
1444 "medium",
1445 "high"
1446 ],
1447 "default_reasoning_effort": "medium",
1448 "supports_attachments": true
1449 },
1450 {
1451 "id": "xai/grok-4.20-multi-agent",
1452 "name": "Grok 4.20 Multi-Agent",
1453 "cost_per_1m_in": 2,
1454 "cost_per_1m_out": 6,
1455 "cost_per_1m_in_cached": 0.2,
1456 "cost_per_1m_out_cached": 0,
1457 "context_window": 2000000,
1458 "default_max_tokens": 8000,
1459 "can_reason": true,
1460 "reasoning_levels": [
1461 "low",
1462 "medium",
1463 "high"
1464 ],
1465 "default_reasoning_effort": "medium",
1466 "supports_attachments": true
1467 },
1468 {
1469 "id": "xai/grok-4.20-non-reasoning",
1470 "name": "Grok 4.20 Non-Reasoning",
1471 "cost_per_1m_in": 2,
1472 "cost_per_1m_out": 6,
1473 "cost_per_1m_in_cached": 0.2,
1474 "cost_per_1m_out_cached": 0,
1475 "context_window": 2000000,
1476 "default_max_tokens": 8000,
1477 "can_reason": false,
1478 "supports_attachments": true
1479 },
1480 {
1481 "id": "xai/grok-4.20-reasoning",
1482 "name": "Grok 4.20 Reasoning",
1483 "cost_per_1m_in": 2,
1484 "cost_per_1m_out": 6,
1485 "cost_per_1m_in_cached": 0.2,
1486 "cost_per_1m_out_cached": 0,
1487 "context_window": 2000000,
1488 "default_max_tokens": 8000,
1489 "can_reason": true,
1490 "reasoning_levels": [
1491 "low",
1492 "medium",
1493 "high"
1494 ],
1495 "default_reasoning_effort": "medium",
1496 "supports_attachments": true
1497 },
1498 {
1499 "id": "xai/grok-code-fast-1",
1500 "name": "Grok Code Fast 1",
1501 "cost_per_1m_in": 0.2,
1502 "cost_per_1m_out": 1.5,
1503 "cost_per_1m_in_cached": 0.02,
1504 "cost_per_1m_out_cached": 0,
1505 "context_window": 256000,
1506 "default_max_tokens": 8000,
1507 "can_reason": true,
1508 "reasoning_levels": [
1509 "low",
1510 "medium",
1511 "high"
1512 ],
1513 "default_reasoning_effort": "medium",
1514 "supports_attachments": false
1515 },
1516 {
1517 "id": "prime-intellect/intellect-3",
1518 "name": "INTELLECT 3",
1519 "cost_per_1m_in": 0.2,
1520 "cost_per_1m_out": 1.1,
1521 "cost_per_1m_in_cached": 0,
1522 "cost_per_1m_out_cached": 0,
1523 "context_window": 131072,
1524 "default_max_tokens": 8000,
1525 "can_reason": true,
1526 "reasoning_levels": [
1527 "low",
1528 "medium",
1529 "high"
1530 ],
1531 "default_reasoning_effort": "medium",
1532 "supports_attachments": false
1533 },
1534 {
1535 "id": "kwaipilot/kat-coder-pro-v2",
1536 "name": "Kat Coder Pro V2",
1537 "cost_per_1m_in": 0.3,
1538 "cost_per_1m_out": 1.2,
1539 "cost_per_1m_in_cached": 0.06,
1540 "cost_per_1m_out_cached": 0,
1541 "context_window": 256000,
1542 "default_max_tokens": 8000,
1543 "can_reason": true,
1544 "reasoning_levels": [
1545 "low",
1546 "medium",
1547 "high"
1548 ],
1549 "default_reasoning_effort": "medium",
1550 "supports_attachments": false
1551 },
1552 {
1553 "id": "moonshotai/kimi-k2-0905",
1554 "name": "Kimi K2 0905",
1555 "cost_per_1m_in": 0.6,
1556 "cost_per_1m_out": 2.5,
1557 "cost_per_1m_in_cached": 0.3,
1558 "cost_per_1m_out_cached": 0,
1559 "context_window": 256000,
1560 "default_max_tokens": 8000,
1561 "can_reason": false,
1562 "supports_attachments": false
1563 },
1564 {
1565 "id": "moonshotai/kimi-k2",
1566 "name": "Kimi K2 Instruct",
1567 "cost_per_1m_in": 0.57,
1568 "cost_per_1m_out": 2.3,
1569 "cost_per_1m_in_cached": 0,
1570 "cost_per_1m_out_cached": 0,
1571 "context_window": 131072,
1572 "default_max_tokens": 8000,
1573 "can_reason": false,
1574 "supports_attachments": false
1575 },
1576 {
1577 "id": "moonshotai/kimi-k2-thinking",
1578 "name": "Kimi K2 Thinking",
1579 "cost_per_1m_in": 0.6,
1580 "cost_per_1m_out": 2.5,
1581 "cost_per_1m_in_cached": 0.15,
1582 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1584 "default_max_tokens": 8000,
1585 "can_reason": true,
1586 "reasoning_levels": [
1587 "low",
1588 "medium",
1589 "high"
1590 ],
1591 "default_reasoning_effort": "medium",
1592 "supports_attachments": false
1593 },
1594 {
1595 "id": "moonshotai/kimi-k2-thinking-turbo",
1596 "name": "Kimi K2 Thinking Turbo",
1597 "cost_per_1m_in": 1.15,
1598 "cost_per_1m_out": 8,
1599 "cost_per_1m_in_cached": 0.15,
1600 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1602 "default_max_tokens": 8000,
1603 "can_reason": true,
1604 "reasoning_levels": [
1605 "low",
1606 "medium",
1607 "high"
1608 ],
1609 "default_reasoning_effort": "medium",
1610 "supports_attachments": false
1611 },
1612 {
1613 "id": "moonshotai/kimi-k2-turbo",
1614 "name": "Kimi K2 Turbo",
1615 "cost_per_1m_in": 1.15,
1616 "cost_per_1m_out": 8,
1617 "cost_per_1m_in_cached": 0.15,
1618 "cost_per_1m_out_cached": 0,
1619 "context_window": 256000,
1620 "default_max_tokens": 8000,
1621 "can_reason": false,
1622 "supports_attachments": false
1623 },
1624 {
1625 "id": "moonshotai/kimi-k2.5",
1626 "name": "Kimi K2.5",
1627 "cost_per_1m_in": 0.6,
1628 "cost_per_1m_out": 3,
1629 "cost_per_1m_in_cached": 0.1,
1630 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
1632 "default_max_tokens": 8000,
1633 "can_reason": true,
1634 "reasoning_levels": [
1635 "low",
1636 "medium",
1637 "high"
1638 ],
1639 "default_reasoning_effort": "medium",
1640 "supports_attachments": true
1641 },
1642 {
1643 "id": "moonshotai/kimi-k2.6",
1644 "name": "Kimi K2.6",
1645 "cost_per_1m_in": 0.95,
1646 "cost_per_1m_out": 4,
1647 "cost_per_1m_in_cached": 0.16,
1648 "cost_per_1m_out_cached": 0,
1649 "context_window": 262000,
1650 "default_max_tokens": 8000,
1651 "can_reason": true,
1652 "reasoning_levels": [
1653 "low",
1654 "medium",
1655 "high"
1656 ],
1657 "default_reasoning_effort": "medium",
1658 "supports_attachments": true
1659 },
1660 {
1661 "id": "meta/llama-3.1-70b",
1662 "name": "Llama 3.1 70B Instruct",
1663 "cost_per_1m_in": 0.72,
1664 "cost_per_1m_out": 0.72,
1665 "cost_per_1m_in_cached": 0,
1666 "cost_per_1m_out_cached": 0,
1667 "context_window": 128000,
1668 "default_max_tokens": 8000,
1669 "can_reason": false,
1670 "supports_attachments": false
1671 },
1672 {
1673 "id": "meta/llama-3.1-8b",
1674 "name": "Llama 3.1 8B Instruct",
1675 "cost_per_1m_in": 0.22,
1676 "cost_per_1m_out": 0.22,
1677 "cost_per_1m_in_cached": 0,
1678 "cost_per_1m_out_cached": 0,
1679 "context_window": 128000,
1680 "default_max_tokens": 8000,
1681 "can_reason": false,
1682 "supports_attachments": false
1683 },
1684 {
1685 "id": "meta/llama-3.2-11b",
1686 "name": "Llama 3.2 11B Vision Instruct",
1687 "cost_per_1m_in": 0.16,
1688 "cost_per_1m_out": 0.16,
1689 "cost_per_1m_in_cached": 0,
1690 "cost_per_1m_out_cached": 0,
1691 "context_window": 128000,
1692 "default_max_tokens": 8000,
1693 "can_reason": false,
1694 "supports_attachments": true
1695 },
1696 {
1697 "id": "meta/llama-3.2-90b",
1698 "name": "Llama 3.2 90B Vision Instruct",
1699 "cost_per_1m_in": 0.72,
1700 "cost_per_1m_out": 0.72,
1701 "cost_per_1m_in_cached": 0,
1702 "cost_per_1m_out_cached": 0,
1703 "context_window": 128000,
1704 "default_max_tokens": 8000,
1705 "can_reason": false,
1706 "supports_attachments": true
1707 },
1708 {
1709 "id": "meta/llama-3.3-70b",
1710 "name": "Llama 3.3 70B Instruct",
1711 "cost_per_1m_in": 0.72,
1712 "cost_per_1m_out": 0.72,
1713 "cost_per_1m_in_cached": 0,
1714 "cost_per_1m_out_cached": 0,
1715 "context_window": 128000,
1716 "default_max_tokens": 8000,
1717 "can_reason": false,
1718 "supports_attachments": false
1719 },
1720 {
1721 "id": "meta/llama-4-maverick",
1722 "name": "Llama 4 Maverick 17B Instruct",
1723 "cost_per_1m_in": 0.24,
1724 "cost_per_1m_out": 0.97,
1725 "cost_per_1m_in_cached": 0,
1726 "cost_per_1m_out_cached": 0,
1727 "context_window": 128000,
1728 "default_max_tokens": 8000,
1729 "can_reason": false,
1730 "supports_attachments": true
1731 },
1732 {
1733 "id": "meta/llama-4-scout",
1734 "name": "Llama 4 Scout 17B Instruct",
1735 "cost_per_1m_in": 0.17,
1736 "cost_per_1m_out": 0.66,
1737 "cost_per_1m_in_cached": 0,
1738 "cost_per_1m_out_cached": 0,
1739 "context_window": 128000,
1740 "default_max_tokens": 8000,
1741 "can_reason": false,
1742 "supports_attachments": true
1743 },
1744 {
1745 "id": "meituan/longcat-flash-chat",
1746 "name": "LongCat Flash Chat",
1747 "cost_per_1m_in": 0,
1748 "cost_per_1m_out": 0,
1749 "cost_per_1m_in_cached": 0,
1750 "cost_per_1m_out_cached": 0,
1751 "context_window": 128000,
1752 "default_max_tokens": 8000,
1753 "can_reason": false,
1754 "supports_attachments": false
1755 },
1756 {
1757 "id": "inception/mercury-2",
1758 "name": "Mercury 2",
1759 "cost_per_1m_in": 0.25,
1760 "cost_per_1m_out": 0.75,
1761 "cost_per_1m_in_cached": 0.025,
1762 "cost_per_1m_out_cached": 0,
1763 "context_window": 128000,
1764 "default_max_tokens": 8000,
1765 "can_reason": true,
1766 "reasoning_levels": [
1767 "low",
1768 "medium",
1769 "high"
1770 ],
1771 "default_reasoning_effort": "medium",
1772 "supports_attachments": false
1773 },
1774 {
1775 "id": "inception/mercury-coder-small",
1776 "name": "Mercury Coder Small Beta",
1777 "cost_per_1m_in": 0.25,
1778 "cost_per_1m_out": 1,
1779 "cost_per_1m_in_cached": 0,
1780 "cost_per_1m_out_cached": 0,
1781 "context_window": 32000,
1782 "default_max_tokens": 8000,
1783 "can_reason": false,
1784 "supports_attachments": false
1785 },
1786 {
1787 "id": "xiaomi/mimo-v2-flash",
1788 "name": "MiMo V2 Flash",
1789 "cost_per_1m_in": 0.09,
1790 "cost_per_1m_out": 0.29,
1791 "cost_per_1m_in_cached": 0.045,
1792 "cost_per_1m_out_cached": 0,
1793 "context_window": 262144,
1794 "default_max_tokens": 8000,
1795 "can_reason": true,
1796 "reasoning_levels": [
1797 "low",
1798 "medium",
1799 "high"
1800 ],
1801 "default_reasoning_effort": "medium",
1802 "supports_attachments": false
1803 },
1804 {
1805 "id": "xiaomi/mimo-v2-pro",
1806 "name": "MiMo V2 Pro",
1807 "cost_per_1m_in": 1,
1808 "cost_per_1m_out": 3,
1809 "cost_per_1m_in_cached": 0.2,
1810 "cost_per_1m_out_cached": 0,
1811 "context_window": 1000000,
1812 "default_max_tokens": 8000,
1813 "can_reason": true,
1814 "reasoning_levels": [
1815 "low",
1816 "medium",
1817 "high"
1818 ],
1819 "default_reasoning_effort": "medium",
1820 "supports_attachments": false
1821 },
1822 {
1823 "id": "minimax/minimax-m2",
1824 "name": "MiniMax M2",
1825 "cost_per_1m_in": 0.3,
1826 "cost_per_1m_out": 1.2,
1827 "cost_per_1m_in_cached": 0.03,
1828 "cost_per_1m_out_cached": 0.375,
1829 "context_window": 205000,
1830 "default_max_tokens": 8000,
1831 "can_reason": true,
1832 "reasoning_levels": [
1833 "low",
1834 "medium",
1835 "high"
1836 ],
1837 "default_reasoning_effort": "medium",
1838 "supports_attachments": false
1839 },
1840 {
1841 "id": "minimax/minimax-m2.1",
1842 "name": "MiniMax M2.1",
1843 "cost_per_1m_in": 0.3,
1844 "cost_per_1m_out": 1.2,
1845 "cost_per_1m_in_cached": 0.03,
1846 "cost_per_1m_out_cached": 0.375,
1847 "context_window": 204800,
1848 "default_max_tokens": 8000,
1849 "can_reason": true,
1850 "reasoning_levels": [
1851 "low",
1852 "medium",
1853 "high"
1854 ],
1855 "default_reasoning_effort": "medium",
1856 "supports_attachments": false
1857 },
1858 {
1859 "id": "minimax/minimax-m2.1-lightning",
1860 "name": "MiniMax M2.1 Lightning",
1861 "cost_per_1m_in": 0.3,
1862 "cost_per_1m_out": 2.4,
1863 "cost_per_1m_in_cached": 0.03,
1864 "cost_per_1m_out_cached": 0.375,
1865 "context_window": 204800,
1866 "default_max_tokens": 8000,
1867 "can_reason": true,
1868 "reasoning_levels": [
1869 "low",
1870 "medium",
1871 "high"
1872 ],
1873 "default_reasoning_effort": "medium",
1874 "supports_attachments": false
1875 },
1876 {
1877 "id": "minimax/minimax-m2.5",
1878 "name": "MiniMax M2.5",
1879 "cost_per_1m_in": 0.3,
1880 "cost_per_1m_out": 1.2,
1881 "cost_per_1m_in_cached": 0.03,
1882 "cost_per_1m_out_cached": 0.375,
1883 "context_window": 204800,
1884 "default_max_tokens": 8000,
1885 "can_reason": true,
1886 "reasoning_levels": [
1887 "low",
1888 "medium",
1889 "high"
1890 ],
1891 "default_reasoning_effort": "medium",
1892 "supports_attachments": false
1893 },
1894 {
1895 "id": "minimax/minimax-m2.5-highspeed",
1896 "name": "MiniMax M2.5 High Speed",
1897 "cost_per_1m_in": 0.6,
1898 "cost_per_1m_out": 2.4,
1899 "cost_per_1m_in_cached": 0.03,
1900 "cost_per_1m_out_cached": 0.375,
1901 "context_window": 204800,
1902 "default_max_tokens": 8000,
1903 "can_reason": true,
1904 "reasoning_levels": [
1905 "low",
1906 "medium",
1907 "high"
1908 ],
1909 "default_reasoning_effort": "medium",
1910 "supports_attachments": false
1911 },
1912 {
1913 "id": "minimax/minimax-m2.7-highspeed",
1914 "name": "MiniMax M2.7 High Speed",
1915 "cost_per_1m_in": 0.6,
1916 "cost_per_1m_out": 2.4,
1917 "cost_per_1m_in_cached": 0.06,
1918 "cost_per_1m_out_cached": 0.375,
1919 "context_window": 204800,
1920 "default_max_tokens": 8000,
1921 "can_reason": true,
1922 "reasoning_levels": [
1923 "low",
1924 "medium",
1925 "high"
1926 ],
1927 "default_reasoning_effort": "medium",
1928 "supports_attachments": true
1929 },
1930 {
1931 "id": "minimax/minimax-m2.7",
      "name": "MiniMax M2.7",
1933 "cost_per_1m_in": 0.3,
1934 "cost_per_1m_out": 1.2,
1935 "cost_per_1m_in_cached": 0.06,
1936 "cost_per_1m_out_cached": 0.375,
1937 "context_window": 204800,
1938 "default_max_tokens": 8000,
1939 "can_reason": true,
1940 "reasoning_levels": [
1941 "low",
1942 "medium",
1943 "high"
1944 ],
1945 "default_reasoning_effort": "medium",
1946 "supports_attachments": true
1947 },
1948 {
1949 "id": "mistral/ministral-3b",
1950 "name": "Ministral 3B",
1951 "cost_per_1m_in": 0.1,
1952 "cost_per_1m_out": 0.1,
1953 "cost_per_1m_in_cached": 0,
1954 "cost_per_1m_out_cached": 0,
1955 "context_window": 128000,
1956 "default_max_tokens": 4000,
1957 "can_reason": false,
1958 "supports_attachments": false
1959 },
1960 {
1961 "id": "mistral/ministral-8b",
1962 "name": "Ministral 8B",
1963 "cost_per_1m_in": 0.15,
1964 "cost_per_1m_out": 0.15,
1965 "cost_per_1m_in_cached": 0,
1966 "cost_per_1m_out_cached": 0,
1967 "context_window": 128000,
1968 "default_max_tokens": 4000,
1969 "can_reason": false,
1970 "supports_attachments": false
1971 },
1972 {
1973 "id": "mistral/codestral",
1974 "name": "Mistral Codestral",
1975 "cost_per_1m_in": 0.3,
1976 "cost_per_1m_out": 0.9,
1977 "cost_per_1m_in_cached": 0,
1978 "cost_per_1m_out_cached": 0,
1979 "context_window": 128000,
1980 "default_max_tokens": 4000,
1981 "can_reason": false,
1982 "supports_attachments": false
1983 },
1984 {
1985 "id": "mistral/mistral-medium",
1986 "name": "Mistral Medium 3.1",
1987 "cost_per_1m_in": 0.4,
1988 "cost_per_1m_out": 2,
1989 "cost_per_1m_in_cached": 0,
1990 "cost_per_1m_out_cached": 0,
1991 "context_window": 128000,
1992 "default_max_tokens": 8000,
1993 "can_reason": false,
1994 "supports_attachments": true
1995 },
1996 {
1997 "id": "mistral/mistral-small",
1998 "name": "Mistral Small",
1999 "cost_per_1m_in": 0.1,
2000 "cost_per_1m_out": 0.3,
2001 "cost_per_1m_in_cached": 0,
2002 "cost_per_1m_out_cached": 0,
2003 "context_window": 32000,
2004 "default_max_tokens": 4000,
2005 "can_reason": false,
2006 "supports_attachments": true
2007 },
2008 {
2009 "id": "nvidia/nemotron-nano-12b-v2-vl",
2010 "name": "Nvidia Nemotron Nano 12B V2 VL",
2011 "cost_per_1m_in": 0.2,
2012 "cost_per_1m_out": 0.6,
2013 "cost_per_1m_in_cached": 0,
2014 "cost_per_1m_out_cached": 0,
2015 "context_window": 131072,
2016 "default_max_tokens": 8000,
2017 "can_reason": true,
2018 "reasoning_levels": [
2019 "low",
2020 "medium",
2021 "high"
2022 ],
2023 "default_reasoning_effort": "medium",
2024 "supports_attachments": true
2025 },
2026 {
2027 "id": "nvidia/nemotron-nano-9b-v2",
2028 "name": "Nvidia Nemotron Nano 9B V2",
2029 "cost_per_1m_in": 0.06,
2030 "cost_per_1m_out": 0.23,
2031 "cost_per_1m_in_cached": 0,
2032 "cost_per_1m_out_cached": 0,
2033 "context_window": 131072,
2034 "default_max_tokens": 8000,
2035 "can_reason": true,
2036 "reasoning_levels": [
2037 "low",
2038 "medium",
2039 "high"
2040 ],
2041 "default_reasoning_effort": "medium",
2042 "supports_attachments": false
2043 },
2044 {
2045 "id": "mistral/pixtral-12b",
2046 "name": "Pixtral 12B 2409",
2047 "cost_per_1m_in": 0.15,
2048 "cost_per_1m_out": 0.15,
2049 "cost_per_1m_in_cached": 0,
2050 "cost_per_1m_out_cached": 0,
2051 "context_window": 128000,
2052 "default_max_tokens": 4000,
2053 "can_reason": false,
2054 "supports_attachments": true
2055 },
2056 {
2057 "id": "mistral/pixtral-large",
2058 "name": "Pixtral Large",
2059 "cost_per_1m_in": 2,
2060 "cost_per_1m_out": 6,
2061 "cost_per_1m_in_cached": 0,
2062 "cost_per_1m_out_cached": 0,
2063 "context_window": 128000,
2064 "default_max_tokens": 4000,
2065 "can_reason": false,
2066 "supports_attachments": true
2067 },
2068 {
2069 "id": "alibaba/qwen-3-32b",
2070 "name": "Qwen 3 32B",
2071 "cost_per_1m_in": 0.16,
2072 "cost_per_1m_out": 0.64,
2073 "cost_per_1m_in_cached": 0,
2074 "cost_per_1m_out_cached": 0,
2075 "context_window": 128000,
2076 "default_max_tokens": 8000,
2077 "can_reason": true,
2078 "reasoning_levels": [
2079 "low",
2080 "medium",
2081 "high"
2082 ],
2083 "default_reasoning_effort": "medium",
2084 "supports_attachments": false
2085 },
2086 {
2087 "id": "alibaba/qwen3-coder-30b-a3b",
2088 "name": "Qwen 3 Coder 30B A3B Instruct",
2089 "cost_per_1m_in": 0.15,
2090 "cost_per_1m_out": 0.6,
2091 "cost_per_1m_in_cached": 0,
2092 "cost_per_1m_out_cached": 0,
2093 "context_window": 262144,
2094 "default_max_tokens": 8000,
2095 "can_reason": true,
2096 "reasoning_levels": [
2097 "low",
2098 "medium",
2099 "high"
2100 ],
2101 "default_reasoning_effort": "medium",
2102 "supports_attachments": false
2103 },
2104 {
2105 "id": "alibaba/qwen3-max-thinking",
2106 "name": "Qwen 3 Max Thinking",
2107 "cost_per_1m_in": 1.2,
2108 "cost_per_1m_out": 6,
2109 "cost_per_1m_in_cached": 0.24,
2110 "cost_per_1m_out_cached": 0,
2111 "context_window": 256000,
2112 "default_max_tokens": 8000,
2113 "can_reason": true,
2114 "reasoning_levels": [
2115 "low",
2116 "medium",
2117 "high"
2118 ],
2119 "default_reasoning_effort": "medium",
2120 "supports_attachments": false
2121 },
2122 {
2123 "id": "alibaba/qwen3.5-flash",
2124 "name": "Qwen 3.5 Flash",
2125 "cost_per_1m_in": 0.1,
2126 "cost_per_1m_out": 0.4,
2127 "cost_per_1m_in_cached": 0.001,
2128 "cost_per_1m_out_cached": 0.125,
2129 "context_window": 1000000,
2130 "default_max_tokens": 8000,
2131 "can_reason": true,
2132 "reasoning_levels": [
2133 "low",
2134 "medium",
2135 "high"
2136 ],
2137 "default_reasoning_effort": "medium",
2138 "supports_attachments": true
2139 },
2140 {
2141 "id": "alibaba/qwen3.5-plus",
2142 "name": "Qwen 3.5 Plus",
2143 "cost_per_1m_in": 0.4,
2144 "cost_per_1m_out": 2.4,
2145 "cost_per_1m_in_cached": 0.04,
2146 "cost_per_1m_out_cached": 0.5,
2147 "context_window": 1000000,
2148 "default_max_tokens": 8000,
2149 "can_reason": true,
2150 "reasoning_levels": [
2151 "low",
2152 "medium",
2153 "high"
2154 ],
2155 "default_reasoning_effort": "medium",
2156 "supports_attachments": true
2157 },
2158 {
2159 "id": "alibaba/qwen3.6-plus",
2160 "name": "Qwen 3.6 Plus",
2161 "cost_per_1m_in": 0.5,
2162 "cost_per_1m_out": 3,
2163 "cost_per_1m_in_cached": 0.1,
2164 "cost_per_1m_out_cached": 0,
2165 "context_window": 1000000,
2166 "default_max_tokens": 8000,
2167 "can_reason": true,
2168 "reasoning_levels": [
2169 "low",
2170 "medium",
2171 "high"
2172 ],
2173 "default_reasoning_effort": "medium",
2174 "supports_attachments": true
2175 },
2176 {
2177 "id": "alibaba/qwen3-235b-a22b-thinking",
2178 "name": "Qwen3 235B A22B Thinking 2507",
2179 "cost_per_1m_in": 0.23,
2180 "cost_per_1m_out": 2.3,
2181 "cost_per_1m_in_cached": 0.2,
2182 "cost_per_1m_out_cached": 0,
      "context_window": 262144,
2184 "default_max_tokens": 8000,
2185 "can_reason": true,
2186 "reasoning_levels": [
2187 "low",
2188 "medium",
2189 "high"
2190 ],
2191 "default_reasoning_effort": "medium",
2192 "supports_attachments": true
2193 },
2194 {
2195 "id": "alibaba/qwen-3-235b",
2196 "name": "Qwen3 235B A22b Instruct 2507",
2197 "cost_per_1m_in": 0.6,
2198 "cost_per_1m_out": 1.2,
2199 "cost_per_1m_in_cached": 0.6,
2200 "cost_per_1m_out_cached": 0,
2201 "context_window": 131000,
2202 "default_max_tokens": 8000,
2203 "can_reason": false,
2204 "supports_attachments": false
2205 },
2206 {
2207 "id": "alibaba/qwen3-coder",
2208 "name": "Qwen3 Coder 480B A35B Instruct",
2209 "cost_per_1m_in": 1.5,
2210 "cost_per_1m_out": 7.5,
2211 "cost_per_1m_in_cached": 0.3,
2212 "cost_per_1m_out_cached": 0,
2213 "context_window": 262144,
2214 "default_max_tokens": 8000,
2215 "can_reason": false,
2216 "supports_attachments": false
2217 },
2218 {
2219 "id": "alibaba/qwen3-coder-next",
2220 "name": "Qwen3 Coder Next",
2221 "cost_per_1m_in": 0.5,
2222 "cost_per_1m_out": 1.2,
2223 "cost_per_1m_in_cached": 0,
2224 "cost_per_1m_out_cached": 0,
2225 "context_window": 256000,
2226 "default_max_tokens": 8000,
2227 "can_reason": false,
2228 "supports_attachments": false
2229 },
2230 {
2231 "id": "alibaba/qwen3-coder-plus",
2232 "name": "Qwen3 Coder Plus",
2233 "cost_per_1m_in": 1,
2234 "cost_per_1m_out": 5,
2235 "cost_per_1m_in_cached": 0.2,
2236 "cost_per_1m_out_cached": 0,
2237 "context_window": 1000000,
2238 "default_max_tokens": 8000,
2239 "can_reason": false,
2240 "supports_attachments": false
2241 },
2242 {
2243 "id": "alibaba/qwen3-max",
2244 "name": "Qwen3 Max",
2245 "cost_per_1m_in": 1.2,
2246 "cost_per_1m_out": 6,
2247 "cost_per_1m_in_cached": 0.24,
2248 "cost_per_1m_out_cached": 0,
2249 "context_window": 262144,
2250 "default_max_tokens": 8000,
2251 "can_reason": false,
2252 "supports_attachments": false
2253 },
2254 {
2255 "id": "alibaba/qwen3-max-preview",
2256 "name": "Qwen3 Max Preview",
2257 "cost_per_1m_in": 1.2,
2258 "cost_per_1m_out": 6,
2259 "cost_per_1m_in_cached": 0.24,
2260 "cost_per_1m_out_cached": 0,
2261 "context_window": 262144,
2262 "default_max_tokens": 8000,
2263 "can_reason": false,
2264 "supports_attachments": false
2265 },
2266 {
2267 "id": "alibaba/qwen3-vl-thinking",
2268 "name": "Qwen3 VL 235B A22B Thinking",
2269 "cost_per_1m_in": 0.4,
2270 "cost_per_1m_out": 4,
2271 "cost_per_1m_in_cached": 0,
2272 "cost_per_1m_out_cached": 0,
2273 "context_window": 131072,
2274 "default_max_tokens": 8000,
2275 "can_reason": true,
2276 "reasoning_levels": [
2277 "low",
2278 "medium",
2279 "high"
2280 ],
2281 "default_reasoning_effort": "medium",
2282 "supports_attachments": true
2283 },
2284 {
2285 "id": "alibaba/qwen-3-14b",
2286 "name": "Qwen3-14B",
2287 "cost_per_1m_in": 0.12,
2288 "cost_per_1m_out": 0.24,
2289 "cost_per_1m_in_cached": 0,
2290 "cost_per_1m_out_cached": 0,
2291 "context_window": 40960,
2292 "default_max_tokens": 8000,
2293 "can_reason": true,
2294 "reasoning_levels": [
2295 "low",
2296 "medium",
2297 "high"
2298 ],
2299 "default_reasoning_effort": "medium",
2300 "supports_attachments": false
2301 },
2302 {
2303 "id": "alibaba/qwen-3-30b",
2304 "name": "Qwen3-30B-A3B",
2305 "cost_per_1m_in": 0.08,
2306 "cost_per_1m_out": 0.29,
2307 "cost_per_1m_in_cached": 0,
2308 "cost_per_1m_out_cached": 0,
2309 "context_window": 40960,
2310 "default_max_tokens": 8000,
2311 "can_reason": true,
2312 "reasoning_levels": [
2313 "low",
2314 "medium",
2315 "high"
2316 ],
2317 "default_reasoning_effort": "medium",
2318 "supports_attachments": false
2319 },
2320 {
2321 "id": "bytedance/seed-1.6",
2322 "name": "Seed 1.6",
2323 "cost_per_1m_in": 0.25,
2324 "cost_per_1m_out": 2,
2325 "cost_per_1m_in_cached": 0.05,
2326 "cost_per_1m_out_cached": 0,
2327 "context_window": 256000,
2328 "default_max_tokens": 8000,
2329 "can_reason": true,
2330 "reasoning_levels": [
2331 "low",
2332 "medium",
2333 "high"
2334 ],
2335 "default_reasoning_effort": "medium",
2336 "supports_attachments": false
2337 },
2338 {
2339 "id": "perplexity/sonar",
2340 "name": "Sonar",
2341 "cost_per_1m_in": 0,
2342 "cost_per_1m_out": 0,
2343 "cost_per_1m_in_cached": 0,
2344 "cost_per_1m_out_cached": 0,
2345 "context_window": 127000,
2346 "default_max_tokens": 8000,
2347 "can_reason": false,
2348 "supports_attachments": true
2349 },
2350 {
2351 "id": "perplexity/sonar-pro",
2352 "name": "Sonar Pro",
2353 "cost_per_1m_in": 0,
2354 "cost_per_1m_out": 0,
2355 "cost_per_1m_in_cached": 0,
2356 "cost_per_1m_out_cached": 0,
2357 "context_window": 200000,
2358 "default_max_tokens": 8000,
2359 "can_reason": false,
2360 "supports_attachments": true
2361 },
2362 {
2363 "id": "arcee-ai/trinity-large-preview",
2364 "name": "Trinity Large Preview",
2365 "cost_per_1m_in": 0.25,
2366 "cost_per_1m_out": 1,
2367 "cost_per_1m_in_cached": 0,
2368 "cost_per_1m_out_cached": 0,
2369 "context_window": 131000,
2370 "default_max_tokens": 8000,
2371 "can_reason": false,
2372 "supports_attachments": false
2373 },
2374 {
2375 "id": "arcee-ai/trinity-large-thinking",
2376 "name": "Trinity Large Thinking",
2377 "cost_per_1m_in": 0.25,
2378 "cost_per_1m_out": 0.9,
2379 "cost_per_1m_in_cached": 0,
2380 "cost_per_1m_out_cached": 0,
2381 "context_window": 262100,
2382 "default_max_tokens": 8000,
2383 "can_reason": true,
2384 "reasoning_levels": [
2385 "low",
2386 "medium",
2387 "high"
2388 ],
2389 "default_reasoning_effort": "medium",
2390 "supports_attachments": false
2391 },
2392 {
2393 "id": "openai/o1",
2394 "name": "o1",
2395 "cost_per_1m_in": 15,
2396 "cost_per_1m_out": 60,
2397 "cost_per_1m_in_cached": 7.5,
2398 "cost_per_1m_out_cached": 0,
2399 "context_window": 200000,
2400 "default_max_tokens": 8000,
2401 "can_reason": true,
2402 "reasoning_levels": [
2403 "low",
2404 "medium",
2405 "high"
2406 ],
2407 "default_reasoning_effort": "medium",
2408 "supports_attachments": true
2409 },
2410 {
2411 "id": "openai/o3",
2412 "name": "o3",
2413 "cost_per_1m_in": 2,
2414 "cost_per_1m_out": 8,
2415 "cost_per_1m_in_cached": 0.5,
2416 "cost_per_1m_out_cached": 0,
2417 "context_window": 200000,
2418 "default_max_tokens": 8000,
2419 "can_reason": true,
2420 "reasoning_levels": [
2421 "low",
2422 "medium",
2423 "high"
2424 ],
2425 "default_reasoning_effort": "medium",
2426 "supports_attachments": true
2427 },
2428 {
2429 "id": "openai/o3-pro",
2430 "name": "o3 Pro",
2431 "cost_per_1m_in": 20,
2432 "cost_per_1m_out": 80,
2433 "cost_per_1m_in_cached": 0,
2434 "cost_per_1m_out_cached": 0,
2435 "context_window": 200000,
2436 "default_max_tokens": 8000,
2437 "can_reason": true,
2438 "reasoning_levels": [
2439 "low",
2440 "medium",
2441 "high"
2442 ],
2443 "default_reasoning_effort": "medium",
2444 "supports_attachments": true
2445 },
2446 {
2447 "id": "openai/o3-deep-research",
2448 "name": "o3-deep-research",
2449 "cost_per_1m_in": 10,
2450 "cost_per_1m_out": 40,
2451 "cost_per_1m_in_cached": 2.5,
2452 "cost_per_1m_out_cached": 0,
2453 "context_window": 200000,
2454 "default_max_tokens": 8000,
2455 "can_reason": true,
2456 "reasoning_levels": [
2457 "low",
2458 "medium",
2459 "high"
2460 ],
2461 "default_reasoning_effort": "medium",
2462 "supports_attachments": true
2463 },
2464 {
2465 "id": "openai/o3-mini",
2466 "name": "o3-mini",
2467 "cost_per_1m_in": 1.1,
2468 "cost_per_1m_out": 4.4,
2469 "cost_per_1m_in_cached": 0.55,
2470 "cost_per_1m_out_cached": 0,
2471 "context_window": 200000,
2472 "default_max_tokens": 8000,
2473 "can_reason": true,
2474 "reasoning_levels": [
2475 "low",
2476 "medium",
2477 "high"
2478 ],
2479 "default_reasoning_effort": "medium",
2480 "supports_attachments": false
2481 },
2482 {
2483 "id": "openai/o4-mini",
2484 "name": "o4-mini",
2485 "cost_per_1m_in": 1.1,
2486 "cost_per_1m_out": 4.4,
2487 "cost_per_1m_in_cached": 0.275,
2488 "cost_per_1m_out_cached": 0,
2489 "context_window": 200000,
2490 "default_max_tokens": 8000,
2491 "can_reason": true,
2492 "reasoning_levels": [
2493 "low",
2494 "medium",
2495 "high"
2496 ],
2497 "default_reasoning_effort": "medium",
2498 "supports_attachments": true
2499 }
2500 ],
2501 "default_headers": {
2502 "HTTP-Referer": "https://charm.land",
2503 "X-Title": "Crush"
2504 }
2505}