1{
2 "name": "Vercel",
3 "id": "vercel",
4 "api_key": "$VERCEL_API_KEY",
5 "api_endpoint": "https://ai-gateway.vercel.sh/v1",
6 "type": "vercel",
7 "default_large_model_id": "anthropic/claude-sonnet-4",
8 "default_small_model_id": "anthropic/claude-haiku-4.5",
9 "models": [
10 {
11 "id": "anthropic/claude-3-haiku",
12 "name": "Claude 3 Haiku",
13 "cost_per_1m_in": 0.25,
14 "cost_per_1m_out": 1.25,
15 "cost_per_1m_in_cached": 0.03,
16 "cost_per_1m_out_cached": 0.3,
17 "context_window": 200000,
18 "default_max_tokens": 4096,
19 "can_reason": false,
20 "supports_attachments": true,
21 "options": {}
22 },
23 {
24 "id": "anthropic/claude-3.5-haiku",
25 "name": "Claude 3.5 Haiku",
26 "cost_per_1m_in": 0.8,
27 "cost_per_1m_out": 4,
28 "cost_per_1m_in_cached": 0.08,
29 "cost_per_1m_out_cached": 1,
30 "context_window": 200000,
31 "default_max_tokens": 8000,
32 "can_reason": false,
33 "supports_attachments": true,
34 "options": {}
35 },
36 {
37 "id": "anthropic/claude-3.5-sonnet",
38 "name": "Claude 3.5 Sonnet",
39 "cost_per_1m_in": 3,
40 "cost_per_1m_out": 15,
41 "cost_per_1m_in_cached": 0.3,
42 "cost_per_1m_out_cached": 3.75,
43 "context_window": 200000,
44 "default_max_tokens": 8000,
45 "can_reason": false,
46 "supports_attachments": true,
47 "options": {}
48 },
49 {
50 "id": "anthropic/claude-3.5-sonnet-20240620",
51 "name": "Claude 3.5 Sonnet (2024-06-20)",
52 "cost_per_1m_in": 3,
53 "cost_per_1m_out": 15,
54 "cost_per_1m_in_cached": 0,
55 "cost_per_1m_out_cached": 0,
56 "context_window": 200000,
57 "default_max_tokens": 8000,
58 "can_reason": false,
59 "supports_attachments": true,
60 "options": {}
61 },
62 {
63 "id": "anthropic/claude-3.7-sonnet",
64 "name": "Claude 3.7 Sonnet",
65 "cost_per_1m_in": 3,
66 "cost_per_1m_out": 15,
67 "cost_per_1m_in_cached": 0.3,
68 "cost_per_1m_out_cached": 3.75,
69 "context_window": 200000,
70 "default_max_tokens": 8000,
71 "can_reason": true,
72 "reasoning_levels": [
73 "none",
74 "minimal",
75 "low",
76 "medium",
77 "high",
78 "xhigh"
79 ],
80 "default_reasoning_effort": "medium",
81 "supports_attachments": true,
82 "options": {}
83 },
84 {
85 "id": "anthropic/claude-haiku-4.5",
86 "name": "Claude Haiku 4.5",
87 "cost_per_1m_in": 1,
88 "cost_per_1m_out": 5,
89 "cost_per_1m_in_cached": 0.1,
90 "cost_per_1m_out_cached": 1.25,
91 "context_window": 200000,
92 "default_max_tokens": 8000,
93 "can_reason": true,
94 "reasoning_levels": [
95 "none",
96 "minimal",
97 "low",
98 "medium",
99 "high",
100 "xhigh"
101 ],
102 "default_reasoning_effort": "medium",
103 "supports_attachments": true,
104 "options": {}
105 },
106 {
107 "id": "anthropic/claude-opus-4",
108 "name": "Claude Opus 4",
109 "cost_per_1m_in": 15,
110 "cost_per_1m_out": 75,
111 "cost_per_1m_in_cached": 1.5,
112 "cost_per_1m_out_cached": 18.75,
113 "context_window": 200000,
114 "default_max_tokens": 8000,
115 "can_reason": true,
116 "reasoning_levels": [
117 "none",
118 "minimal",
119 "low",
120 "medium",
121 "high",
122 "xhigh"
123 ],
124 "default_reasoning_effort": "medium",
125 "supports_attachments": true,
126 "options": {}
127 },
128 {
129 "id": "anthropic/claude-opus-4.1",
130 "name": "Claude Opus 4.1",
131 "cost_per_1m_in": 15,
132 "cost_per_1m_out": 75,
133 "cost_per_1m_in_cached": 1.5,
134 "cost_per_1m_out_cached": 18.75,
135 "context_window": 200000,
136 "default_max_tokens": 8000,
137 "can_reason": true,
138 "reasoning_levels": [
139 "none",
140 "minimal",
141 "low",
142 "medium",
143 "high",
144 "xhigh"
145 ],
146 "default_reasoning_effort": "medium",
147 "supports_attachments": true,
148 "options": {}
149 },
150 {
151 "id": "anthropic/claude-opus-4.5",
152 "name": "Claude Opus 4.5",
153 "cost_per_1m_in": 5,
154 "cost_per_1m_out": 25,
155 "cost_per_1m_in_cached": 0.5,
156 "cost_per_1m_out_cached": 6.25,
157 "context_window": 200000,
158 "default_max_tokens": 8000,
159 "can_reason": true,
160 "reasoning_levels": [
161 "none",
162 "minimal",
163 "low",
164 "medium",
165 "high",
166 "xhigh"
167 ],
168 "default_reasoning_effort": "medium",
169 "supports_attachments": true,
170 "options": {}
171 },
172 {
173 "id": "anthropic/claude-opus-4.6",
174 "name": "Claude Opus 4.6",
175 "cost_per_1m_in": 5,
176 "cost_per_1m_out": 25,
177 "cost_per_1m_in_cached": 0.5,
178 "cost_per_1m_out_cached": 6.25,
179 "context_window": 1000000,
180 "default_max_tokens": 8000,
181 "can_reason": true,
182 "reasoning_levels": [
183 "none",
184 "minimal",
185 "low",
186 "medium",
187 "high",
188 "xhigh"
189 ],
190 "default_reasoning_effort": "medium",
191 "supports_attachments": true,
192 "options": {}
193 },
194 {
195 "id": "anthropic/claude-sonnet-4",
196 "name": "Claude Sonnet 4",
197 "cost_per_1m_in": 3,
198 "cost_per_1m_out": 15,
199 "cost_per_1m_in_cached": 0.3,
200 "cost_per_1m_out_cached": 3.75,
201 "context_window": 1000000,
202 "default_max_tokens": 8000,
203 "can_reason": true,
204 "reasoning_levels": [
205 "none",
206 "minimal",
207 "low",
208 "medium",
209 "high",
210 "xhigh"
211 ],
212 "default_reasoning_effort": "medium",
213 "supports_attachments": true,
214 "options": {}
215 },
216 {
217 "id": "anthropic/claude-sonnet-4.5",
218 "name": "Claude Sonnet 4.5",
219 "cost_per_1m_in": 3,
220 "cost_per_1m_out": 15,
221 "cost_per_1m_in_cached": 0.3,
222 "cost_per_1m_out_cached": 3.75,
223 "context_window": 1000000,
224 "default_max_tokens": 8000,
225 "can_reason": true,
226 "reasoning_levels": [
227 "none",
228 "minimal",
229 "low",
230 "medium",
231 "high",
232 "xhigh"
233 ],
234 "default_reasoning_effort": "medium",
235 "supports_attachments": true,
236 "options": {}
237 },
238 {
239 "id": "anthropic/claude-sonnet-4.6",
240 "name": "Claude Sonnet 4.6",
241 "cost_per_1m_in": 3,
242 "cost_per_1m_out": 15,
243 "cost_per_1m_in_cached": 0.3,
244 "cost_per_1m_out_cached": 3.75,
245 "context_window": 1000000,
246 "default_max_tokens": 8000,
247 "can_reason": true,
248 "reasoning_levels": [
249 "none",
250 "minimal",
251 "low",
252 "medium",
253 "high",
254 "xhigh"
255 ],
256 "default_reasoning_effort": "medium",
257 "supports_attachments": true,
258 "options": {}
259 },
260 {
261 "id": "openai/codex-mini",
262 "name": "Codex Mini",
263 "cost_per_1m_in": 1.5,
264 "cost_per_1m_out": 6,
265 "cost_per_1m_in_cached": 0.375,
266 "cost_per_1m_out_cached": 0,
267 "context_window": 200000,
268 "default_max_tokens": 8000,
269 "can_reason": true,
270 "reasoning_levels": [
271 "low",
272 "medium",
273 "high"
274 ],
275 "default_reasoning_effort": "medium",
276 "supports_attachments": true,
277 "options": {}
278 },
279 {
280 "id": "cohere/command-a",
281 "name": "Command A",
282 "cost_per_1m_in": 2.5,
283 "cost_per_1m_out": 10,
284 "cost_per_1m_in_cached": 0,
285 "cost_per_1m_out_cached": 0,
286 "context_window": 256000,
287 "default_max_tokens": 8000,
288 "can_reason": false,
289 "supports_attachments": false,
290 "options": {}
291 },
292 {
293 "id": "deepseek/deepseek-v3",
294 "name": "DeepSeek V3 0324",
295 "cost_per_1m_in": 0.77,
296 "cost_per_1m_out": 0.77,
297 "cost_per_1m_in_cached": 0,
298 "cost_per_1m_out_cached": 0,
299 "context_window": 163840,
300 "default_max_tokens": 8000,
301 "can_reason": false,
302 "supports_attachments": false,
303 "options": {}
304 },
305 {
306 "id": "deepseek/deepseek-v3.1-terminus",
307 "name": "DeepSeek V3.1 Terminus",
308 "cost_per_1m_in": 0.27,
309 "cost_per_1m_out": 1,
310 "cost_per_1m_in_cached": 0,
311 "cost_per_1m_out_cached": 0,
312 "context_window": 131072,
313 "default_max_tokens": 8000,
314 "can_reason": true,
315 "reasoning_levels": [
316 "low",
317 "medium",
318 "high"
319 ],
320 "default_reasoning_effort": "medium",
321 "supports_attachments": false,
322 "options": {}
323 },
324 {
325 "id": "deepseek/deepseek-v3.2",
326 "name": "DeepSeek V3.2",
327 "cost_per_1m_in": 0.26,
328 "cost_per_1m_out": 0.38,
329 "cost_per_1m_in_cached": 0.13,
330 "cost_per_1m_out_cached": 0,
331 "context_window": 128000,
332 "default_max_tokens": 8000,
333 "can_reason": false,
334 "supports_attachments": false,
335 "options": {}
336 },
337 {
338 "id": "deepseek/deepseek-v3.2-thinking",
339 "name": "DeepSeek V3.2 Thinking",
340 "cost_per_1m_in": 0.28,
341 "cost_per_1m_out": 0.42,
342 "cost_per_1m_in_cached": 0.028,
343 "cost_per_1m_out_cached": 0,
344 "context_window": 128000,
345 "default_max_tokens": 8000,
346 "can_reason": true,
347 "reasoning_levels": [
348 "low",
349 "medium",
350 "high"
351 ],
352 "default_reasoning_effort": "medium",
353 "supports_attachments": false,
354 "options": {}
355 },
356 {
357 "id": "deepseek/deepseek-v3.1",
358 "name": "DeepSeek-V3.1",
359 "cost_per_1m_in": 0.21,
360 "cost_per_1m_out": 0.79,
361 "cost_per_1m_in_cached": 0,
362 "cost_per_1m_out_cached": 0,
363 "context_window": 163840,
364 "default_max_tokens": 8000,
365 "can_reason": true,
366 "reasoning_levels": [
367 "low",
368 "medium",
369 "high"
370 ],
371 "default_reasoning_effort": "medium",
372 "supports_attachments": false,
373 "options": {}
374 },
375 {
376 "id": "mistral/devstral-2",
377 "name": "Devstral 2",
378 "cost_per_1m_in": 0,
379 "cost_per_1m_out": 0,
380 "cost_per_1m_in_cached": 0,
381 "cost_per_1m_out_cached": 0,
382 "context_window": 256000,
383 "default_max_tokens": 8000,
384 "can_reason": false,
385 "supports_attachments": false,
386 "options": {}
387 },
388 {
389 "id": "mistral/devstral-small",
390 "name": "Devstral Small 1.1",
391 "cost_per_1m_in": 0.1,
392 "cost_per_1m_out": 0.3,
393 "cost_per_1m_in_cached": 0,
394 "cost_per_1m_out_cached": 0,
395 "context_window": 128000,
396 "default_max_tokens": 8000,
397 "can_reason": false,
398 "supports_attachments": false,
399 "options": {}
400 },
401 {
402 "id": "mistral/devstral-small-2",
403 "name": "Devstral Small 2",
404 "cost_per_1m_in": 0,
405 "cost_per_1m_out": 0,
406 "cost_per_1m_in_cached": 0,
407 "cost_per_1m_out_cached": 0,
408 "context_window": 256000,
409 "default_max_tokens": 8000,
410 "can_reason": false,
411 "supports_attachments": false,
412 "options": {}
413 },
414 {
415 "id": "zai/glm-4.5-air",
416 "name": "GLM 4.5 Air",
417 "cost_per_1m_in": 0.2,
418 "cost_per_1m_out": 1.1,
419 "cost_per_1m_in_cached": 0.03,
420 "cost_per_1m_out_cached": 0,
421 "context_window": 128000,
422 "default_max_tokens": 8000,
423 "can_reason": true,
424 "reasoning_levels": [
425 "low",
426 "medium",
427 "high"
428 ],
429 "default_reasoning_effort": "medium",
430 "supports_attachments": false,
431 "options": {}
432 },
433 {
434 "id": "zai/glm-4.5v",
435 "name": "GLM 4.5V",
436 "cost_per_1m_in": 0.6,
437 "cost_per_1m_out": 1.8,
438 "cost_per_1m_in_cached": 0,
439 "cost_per_1m_out_cached": 0,
440 "context_window": 65536,
441 "default_max_tokens": 8000,
442 "can_reason": true,
443 "reasoning_levels": [
444 "low",
445 "medium",
446 "high"
447 ],
448 "default_reasoning_effort": "medium",
449 "supports_attachments": true,
450 "options": {}
451 },
452 {
453 "id": "zai/glm-4.6",
454 "name": "GLM 4.6",
455 "cost_per_1m_in": 0.45,
456 "cost_per_1m_out": 1.8,
457 "cost_per_1m_in_cached": 0.11,
458 "cost_per_1m_out_cached": 0,
459 "context_window": 200000,
460 "default_max_tokens": 8000,
461 "can_reason": true,
462 "reasoning_levels": [
463 "low",
464 "medium",
465 "high"
466 ],
467 "default_reasoning_effort": "medium",
468 "supports_attachments": false,
469 "options": {}
470 },
471 {
472 "id": "zai/glm-4.7",
473 "name": "GLM 4.7",
474 "cost_per_1m_in": 0.43,
475 "cost_per_1m_out": 1.75,
476 "cost_per_1m_in_cached": 0.08,
477 "cost_per_1m_out_cached": 0,
478 "context_window": 202752,
479 "default_max_tokens": 8000,
480 "can_reason": true,
481 "reasoning_levels": [
482 "low",
483 "medium",
484 "high"
485 ],
486 "default_reasoning_effort": "medium",
487 "supports_attachments": false,
488 "options": {}
489 },
490 {
491 "id": "zai/glm-4.7-flashx",
492 "name": "GLM 4.7 FlashX",
493 "cost_per_1m_in": 0.06,
494 "cost_per_1m_out": 0.4,
495 "cost_per_1m_in_cached": 0.01,
496 "cost_per_1m_out_cached": 0,
497 "context_window": 200000,
498 "default_max_tokens": 8000,
499 "can_reason": true,
500 "reasoning_levels": [
501 "low",
502 "medium",
503 "high"
504 ],
505 "default_reasoning_effort": "medium",
506 "supports_attachments": false,
507 "options": {}
508 },
509 {
510 "id": "zai/glm-4.5",
511 "name": "GLM-4.5",
512 "cost_per_1m_in": 0.6,
513 "cost_per_1m_out": 2.2,
514 "cost_per_1m_in_cached": 0,
515 "cost_per_1m_out_cached": 0,
516 "context_window": 131072,
517 "default_max_tokens": 8000,
518 "can_reason": true,
519 "reasoning_levels": [
520 "low",
521 "medium",
522 "high"
523 ],
524 "default_reasoning_effort": "medium",
525 "supports_attachments": false,
526 "options": {}
527 },
528 {
529 "id": "zai/glm-4.6v",
530 "name": "GLM-4.6V",
531 "cost_per_1m_in": 0.3,
532 "cost_per_1m_out": 0.9,
533 "cost_per_1m_in_cached": 0.05,
534 "cost_per_1m_out_cached": 0,
535 "context_window": 128000,
536 "default_max_tokens": 8000,
537 "can_reason": true,
538 "reasoning_levels": [
539 "low",
540 "medium",
541 "high"
542 ],
543 "default_reasoning_effort": "medium",
544 "supports_attachments": true,
545 "options": {}
546 },
547 {
548 "id": "zai/glm-4.6v-flash",
549 "name": "GLM-4.6V-Flash",
550 "cost_per_1m_in": 0,
551 "cost_per_1m_out": 0,
552 "cost_per_1m_in_cached": 0,
553 "cost_per_1m_out_cached": 0,
554 "context_window": 128000,
555 "default_max_tokens": 8000,
556 "can_reason": true,
557 "reasoning_levels": [
558 "low",
559 "medium",
560 "high"
561 ],
562 "default_reasoning_effort": "medium",
563 "supports_attachments": true,
564 "options": {}
565 },
566 {
567 "id": "zai/glm-5",
568 "name": "GLM-5",
569 "cost_per_1m_in": 1,
570 "cost_per_1m_out": 3.2,
571 "cost_per_1m_in_cached": 0.2,
572 "cost_per_1m_out_cached": 0,
573 "context_window": 202800,
574 "default_max_tokens": 8000,
575 "can_reason": true,
576 "reasoning_levels": [
577 "low",
578 "medium",
579 "high"
580 ],
581 "default_reasoning_effort": "medium",
582 "supports_attachments": false,
583 "options": {}
584 },
585 {
586 "id": "openai/gpt-5.1-codex-max",
587 "name": "GPT 5.1 Codex Max",
588 "cost_per_1m_in": 1.25,
589 "cost_per_1m_out": 10,
590 "cost_per_1m_in_cached": 0.125,
591 "cost_per_1m_out_cached": 0,
592 "context_window": 400000,
593 "default_max_tokens": 8000,
594 "can_reason": true,
595 "reasoning_levels": [
596 "low",
597 "medium",
598 "high"
599 ],
600 "default_reasoning_effort": "medium",
601 "supports_attachments": true,
602 "options": {}
603 },
604 {
605 "id": "openai/gpt-5.1-thinking",
606 "name": "GPT 5.1 Thinking",
607 "cost_per_1m_in": 1.25,
608 "cost_per_1m_out": 10,
609 "cost_per_1m_in_cached": 0.13,
610 "cost_per_1m_out_cached": 0,
611 "context_window": 400000,
612 "default_max_tokens": 8000,
613 "can_reason": true,
614 "reasoning_levels": [
615 "low",
616 "medium",
617 "high"
618 ],
619 "default_reasoning_effort": "medium",
620 "supports_attachments": true,
621 "options": {}
622 },
623 {
624 "id": "openai/gpt-5.2",
625 "name": "GPT 5.2",
626 "cost_per_1m_in": 1.75,
627 "cost_per_1m_out": 14,
628 "cost_per_1m_in_cached": 0.18,
629 "cost_per_1m_out_cached": 0,
630 "context_window": 400000,
631 "default_max_tokens": 8000,
632 "can_reason": true,
633 "reasoning_levels": [
634 "low",
635 "medium",
636 "high"
637 ],
638 "default_reasoning_effort": "medium",
639 "supports_attachments": true,
640 "options": {}
641 },
642 {
643 "id": "openai/gpt-5.2-pro",
644 "name": "GPT 5.2 Pro",
645 "cost_per_1m_in": 21,
646 "cost_per_1m_out": 168,
647 "cost_per_1m_in_cached": 0,
648 "cost_per_1m_out_cached": 0,
649 "context_window": 400000,
650 "default_max_tokens": 8000,
651 "can_reason": true,
652 "reasoning_levels": [
653 "low",
654 "medium",
655 "high"
656 ],
657 "default_reasoning_effort": "medium",
658 "supports_attachments": true,
659 "options": {}
660 },
661 {
662 "id": "openai/gpt-5.3-codex",
663 "name": "GPT 5.3 Codex",
664 "cost_per_1m_in": 1.75,
665 "cost_per_1m_out": 14,
666 "cost_per_1m_in_cached": 0.175,
667 "cost_per_1m_out_cached": 0,
668 "context_window": 400000,
669 "default_max_tokens": 8000,
670 "can_reason": true,
671 "reasoning_levels": [
672 "low",
673 "medium",
674 "high"
675 ],
676 "default_reasoning_effort": "medium",
677 "supports_attachments": true,
678 "options": {}
679 },
680 {
681 "id": "openai/gpt-4-turbo",
682 "name": "GPT-4 Turbo",
683 "cost_per_1m_in": 10,
684 "cost_per_1m_out": 30,
685 "cost_per_1m_in_cached": 0,
686 "cost_per_1m_out_cached": 0,
687 "context_window": 128000,
688 "default_max_tokens": 4096,
689 "can_reason": false,
690 "supports_attachments": true,
691 "options": {}
692 },
693 {
694 "id": "openai/gpt-4.1",
695 "name": "GPT-4.1",
696 "cost_per_1m_in": 2,
697 "cost_per_1m_out": 8,
698 "cost_per_1m_in_cached": 0.5,
699 "cost_per_1m_out_cached": 0,
700 "context_window": 1047576,
701 "default_max_tokens": 8000,
702 "can_reason": false,
703 "supports_attachments": true,
704 "options": {}
705 },
706 {
707 "id": "openai/gpt-4.1-mini",
708 "name": "GPT-4.1 mini",
709 "cost_per_1m_in": 0.4,
710 "cost_per_1m_out": 1.6,
711 "cost_per_1m_in_cached": 0.1,
712 "cost_per_1m_out_cached": 0,
713 "context_window": 1047576,
714 "default_max_tokens": 8000,
715 "can_reason": false,
716 "supports_attachments": true,
717 "options": {}
718 },
719 {
720 "id": "openai/gpt-4.1-nano",
721 "name": "GPT-4.1 nano",
722 "cost_per_1m_in": 0.1,
723 "cost_per_1m_out": 0.4,
724 "cost_per_1m_in_cached": 0.03,
725 "cost_per_1m_out_cached": 0,
726 "context_window": 1047576,
727 "default_max_tokens": 8000,
728 "can_reason": false,
729 "supports_attachments": true,
730 "options": {}
731 },
732 {
733 "id": "openai/gpt-4o",
734 "name": "GPT-4o",
735 "cost_per_1m_in": 2.5,
736 "cost_per_1m_out": 10,
737 "cost_per_1m_in_cached": 1.25,
738 "cost_per_1m_out_cached": 0,
739 "context_window": 128000,
740 "default_max_tokens": 8000,
741 "can_reason": false,
742 "supports_attachments": true,
743 "options": {}
744 },
745 {
746 "id": "openai/gpt-4o-mini",
747 "name": "GPT-4o mini",
748 "cost_per_1m_in": 0.15,
749 "cost_per_1m_out": 0.6,
750 "cost_per_1m_in_cached": 0.075,
751 "cost_per_1m_out_cached": 0,
752 "context_window": 128000,
753 "default_max_tokens": 8000,
754 "can_reason": false,
755 "supports_attachments": true,
756 "options": {}
757 },
758 {
759 "id": "openai/gpt-5",
760 "name": "GPT-5",
761 "cost_per_1m_in": 1.25,
762 "cost_per_1m_out": 10,
763 "cost_per_1m_in_cached": 0.13,
764 "cost_per_1m_out_cached": 0,
765 "context_window": 400000,
766 "default_max_tokens": 8000,
767 "can_reason": true,
768 "reasoning_levels": [
769 "low",
770 "medium",
771 "high"
772 ],
773 "default_reasoning_effort": "medium",
774 "supports_attachments": true,
775 "options": {}
776 },
777 {
778 "id": "openai/gpt-5-chat",
779 "name": "GPT-5 Chat",
780 "cost_per_1m_in": 1.25,
781 "cost_per_1m_out": 10,
782 "cost_per_1m_in_cached": 0.125,
783 "cost_per_1m_out_cached": 0,
784 "context_window": 128000,
785 "default_max_tokens": 8000,
786 "can_reason": true,
787 "reasoning_levels": [
788 "low",
789 "medium",
790 "high"
791 ],
792 "default_reasoning_effort": "medium",
793 "supports_attachments": true,
794 "options": {}
795 },
796 {
797 "id": "openai/gpt-5-mini",
798 "name": "GPT-5 mini",
799 "cost_per_1m_in": 0.25,
800 "cost_per_1m_out": 2,
801 "cost_per_1m_in_cached": 0.03,
802 "cost_per_1m_out_cached": 0,
803 "context_window": 400000,
804 "default_max_tokens": 8000,
805 "can_reason": true,
806 "reasoning_levels": [
807 "low",
808 "medium",
809 "high"
810 ],
811 "default_reasoning_effort": "medium",
812 "supports_attachments": true,
813 "options": {}
814 },
815 {
816 "id": "openai/gpt-5-nano",
817 "name": "GPT-5 nano",
818 "cost_per_1m_in": 0.05,
819 "cost_per_1m_out": 0.4,
820 "cost_per_1m_in_cached": 0.01,
821 "cost_per_1m_out_cached": 0,
822 "context_window": 400000,
823 "default_max_tokens": 8000,
824 "can_reason": true,
825 "reasoning_levels": [
826 "low",
827 "medium",
828 "high"
829 ],
830 "default_reasoning_effort": "medium",
831 "supports_attachments": true,
832 "options": {}
833 },
834 {
835 "id": "openai/gpt-5-pro",
836 "name": "GPT-5 pro",
837 "cost_per_1m_in": 15,
838 "cost_per_1m_out": 120,
839 "cost_per_1m_in_cached": 0,
840 "cost_per_1m_out_cached": 0,
841 "context_window": 400000,
842 "default_max_tokens": 8000,
843 "can_reason": true,
844 "reasoning_levels": [
845 "low",
846 "medium",
847 "high"
848 ],
849 "default_reasoning_effort": "medium",
850 "supports_attachments": true,
851 "options": {}
852 },
853 {
854 "id": "openai/gpt-5-codex",
855 "name": "GPT-5-Codex",
856 "cost_per_1m_in": 1.25,
857 "cost_per_1m_out": 10,
858 "cost_per_1m_in_cached": 0.13,
859 "cost_per_1m_out_cached": 0,
860 "context_window": 400000,
861 "default_max_tokens": 8000,
862 "can_reason": true,
863 "reasoning_levels": [
864 "low",
865 "medium",
866 "high"
867 ],
868 "default_reasoning_effort": "medium",
869 "supports_attachments": true,
870 "options": {}
871 },
872 {
873 "id": "openai/gpt-5.1-codex-mini",
874 "name": "GPT-5.1 Codex mini",
875 "cost_per_1m_in": 0.25,
876 "cost_per_1m_out": 2,
877 "cost_per_1m_in_cached": 0.025,
878 "cost_per_1m_out_cached": 0,
879 "context_window": 400000,
880 "default_max_tokens": 8000,
881 "can_reason": true,
882 "reasoning_levels": [
883 "low",
884 "medium",
885 "high"
886 ],
887 "default_reasoning_effort": "medium",
888 "supports_attachments": true,
889 "options": {}
890 },
891 {
892 "id": "openai/gpt-5.1-instant",
893 "name": "GPT-5.1 Instant",
894 "cost_per_1m_in": 1.25,
895 "cost_per_1m_out": 10,
896 "cost_per_1m_in_cached": 0.13,
897 "cost_per_1m_out_cached": 0,
898 "context_window": 128000,
899 "default_max_tokens": 8000,
900 "can_reason": true,
901 "reasoning_levels": [
902 "low",
903 "medium",
904 "high"
905 ],
906 "default_reasoning_effort": "medium",
907 "supports_attachments": true,
908 "options": {}
909 },
910 {
911 "id": "openai/gpt-5.1-codex",
912 "name": "GPT-5.1-Codex",
913 "cost_per_1m_in": 1.25,
914 "cost_per_1m_out": 10,
915 "cost_per_1m_in_cached": 0.13,
916 "cost_per_1m_out_cached": 0,
917 "context_window": 400000,
918 "default_max_tokens": 8000,
919 "can_reason": true,
920 "reasoning_levels": [
921 "low",
922 "medium",
923 "high"
924 ],
925 "default_reasoning_effort": "medium",
926 "supports_attachments": true,
927 "options": {}
928 },
929 {
930 "id": "openai/gpt-5.2-chat",
931 "name": "GPT-5.2 Chat",
932 "cost_per_1m_in": 1.75,
933 "cost_per_1m_out": 14,
934 "cost_per_1m_in_cached": 0.175,
935 "cost_per_1m_out_cached": 0,
936 "context_window": 128000,
937 "default_max_tokens": 8000,
938 "can_reason": true,
939 "reasoning_levels": [
940 "low",
941 "medium",
942 "high"
943 ],
944 "default_reasoning_effort": "medium",
945 "supports_attachments": true,
946 "options": {}
947 },
948 {
949 "id": "openai/gpt-5.2-codex",
950 "name": "GPT-5.2-Codex",
951 "cost_per_1m_in": 1.75,
952 "cost_per_1m_out": 14,
953 "cost_per_1m_in_cached": 0.175,
954 "cost_per_1m_out_cached": 0,
955 "context_window": 400000,
956 "default_max_tokens": 8000,
957 "can_reason": true,
958 "reasoning_levels": [
959 "low",
960 "medium",
961 "high"
962 ],
963 "default_reasoning_effort": "medium",
964 "supports_attachments": true,
965 "options": {}
966 },
967 {
968 "id": "google/gemini-2.5-flash",
969 "name": "Gemini 2.5 Flash",
970 "cost_per_1m_in": 0.3,
971 "cost_per_1m_out": 2.5,
972 "cost_per_1m_in_cached": 0,
973 "cost_per_1m_out_cached": 0,
974 "context_window": 1000000,
975 "default_max_tokens": 8000,
976 "can_reason": true,
977 "reasoning_levels": [
978 "low",
979 "medium",
980 "high"
981 ],
982 "default_reasoning_effort": "medium",
983 "supports_attachments": false,
984 "options": {}
985 },
986 {
987 "id": "google/gemini-2.5-flash-lite",
988 "name": "Gemini 2.5 Flash Lite",
989 "cost_per_1m_in": 0.1,
990 "cost_per_1m_out": 0.4,
991 "cost_per_1m_in_cached": 0.01,
992 "cost_per_1m_out_cached": 0,
993 "context_window": 1048576,
994 "default_max_tokens": 8000,
995 "can_reason": true,
996 "reasoning_levels": [
997 "low",
998 "medium",
999 "high"
1000 ],
1001 "default_reasoning_effort": "medium",
1002 "supports_attachments": true,
1003 "options": {}
1004 },
1005 {
1006 "id": "google/gemini-2.5-flash-lite-preview-09-2025",
1007 "name": "Gemini 2.5 Flash Lite Preview 09-2025",
1008 "cost_per_1m_in": 0.1,
1009 "cost_per_1m_out": 0.4,
1010 "cost_per_1m_in_cached": 0.01,
1011 "cost_per_1m_out_cached": 0,
1012 "context_window": 1048576,
1013 "default_max_tokens": 8000,
1014 "can_reason": true,
1015 "reasoning_levels": [
1016 "low",
1017 "medium",
1018 "high"
1019 ],
1020 "default_reasoning_effort": "medium",
1021 "supports_attachments": true,
1022 "options": {}
1023 },
1024 {
1025 "id": "google/gemini-2.5-flash-preview-09-2025",
1026 "name": "Gemini 2.5 Flash Preview 09-2025",
1027 "cost_per_1m_in": 0.3,
1028 "cost_per_1m_out": 2.5,
1029 "cost_per_1m_in_cached": 0.03,
1030 "cost_per_1m_out_cached": 0,
1031 "context_window": 1000000,
1032 "default_max_tokens": 8000,
1033 "can_reason": true,
1034 "reasoning_levels": [
1035 "low",
1036 "medium",
1037 "high"
1038 ],
1039 "default_reasoning_effort": "medium",
1040 "supports_attachments": true,
1041 "options": {}
1042 },
1043 {
1044 "id": "google/gemini-2.5-pro",
1045 "name": "Gemini 2.5 Pro",
1046 "cost_per_1m_in": 1.25,
1047 "cost_per_1m_out": 10,
1048 "cost_per_1m_in_cached": 0,
1049 "cost_per_1m_out_cached": 0,
1050 "context_window": 1048576,
1051 "default_max_tokens": 8000,
1052 "can_reason": true,
1053 "reasoning_levels": [
1054 "low",
1055 "medium",
1056 "high"
1057 ],
1058 "default_reasoning_effort": "medium",
1059 "supports_attachments": false,
1060 "options": {}
1061 },
1062 {
1063 "id": "google/gemini-3-flash",
1064 "name": "Gemini 3 Flash",
1065 "cost_per_1m_in": 0.5,
1066 "cost_per_1m_out": 3,
1067 "cost_per_1m_in_cached": 0.05,
1068 "cost_per_1m_out_cached": 0,
1069 "context_window": 1000000,
1070 "default_max_tokens": 8000,
1071 "can_reason": true,
1072 "reasoning_levels": [
1073 "low",
1074 "medium",
1075 "high"
1076 ],
1077 "default_reasoning_effort": "medium",
1078 "supports_attachments": true,
1079 "options": {}
1080 },
1081 {
1082 "id": "google/gemini-3-pro-preview",
1083 "name": "Gemini 3 Pro Preview",
1084 "cost_per_1m_in": 2,
1085 "cost_per_1m_out": 12,
1086 "cost_per_1m_in_cached": 0.2,
1087 "cost_per_1m_out_cached": 0,
1088 "context_window": 1000000,
1089 "default_max_tokens": 8000,
1090 "can_reason": true,
1091 "reasoning_levels": [
1092 "low",
1093 "medium",
1094 "high"
1095 ],
1096 "default_reasoning_effort": "medium",
1097 "supports_attachments": true,
1098 "options": {}
1099 },
1100 {
1101 "id": "google/gemini-3.1-pro-preview",
1102 "name": "Gemini 3.1 Pro Preview",
1103 "cost_per_1m_in": 2,
1104 "cost_per_1m_out": 12,
1105 "cost_per_1m_in_cached": 0.2,
1106 "cost_per_1m_out_cached": 0,
1107 "context_window": 1000000,
1108 "default_max_tokens": 8000,
1109 "can_reason": true,
1110 "reasoning_levels": [
1111 "low",
1112 "medium",
1113 "high"
1114 ],
1115 "default_reasoning_effort": "medium",
1116 "supports_attachments": true,
1117 "options": {}
1118 },
1119 {
1120 "id": "xai/grok-2-vision",
1121 "name": "Grok 2 Vision",
1122 "cost_per_1m_in": 2,
1123 "cost_per_1m_out": 10,
1124 "cost_per_1m_in_cached": 0,
1125 "cost_per_1m_out_cached": 0,
1126 "context_window": 32768,
1127 "default_max_tokens": 8000,
1128 "can_reason": false,
1129 "supports_attachments": true,
1130 "options": {}
1131 },
1132 {
1133 "id": "xai/grok-3",
1134 "name": "Grok 3 Beta",
1135 "cost_per_1m_in": 3,
1136 "cost_per_1m_out": 15,
1137 "cost_per_1m_in_cached": 0,
1138 "cost_per_1m_out_cached": 0,
1139 "context_window": 131072,
1140 "default_max_tokens": 8000,
1141 "can_reason": false,
1142 "supports_attachments": false,
1143 "options": {}
1144 },
1145 {
1146 "id": "xai/grok-3-fast",
1147 "name": "Grok 3 Fast Beta",
1148 "cost_per_1m_in": 5,
1149 "cost_per_1m_out": 25,
1150 "cost_per_1m_in_cached": 0,
1151 "cost_per_1m_out_cached": 0,
1152 "context_window": 131072,
1153 "default_max_tokens": 8000,
1154 "can_reason": false,
1155 "supports_attachments": false,
1156 "options": {}
1157 },
1158 {
1159 "id": "xai/grok-3-mini",
1160 "name": "Grok 3 Mini Beta",
1161 "cost_per_1m_in": 0.3,
1162 "cost_per_1m_out": 0.5,
1163 "cost_per_1m_in_cached": 0,
1164 "cost_per_1m_out_cached": 0,
1165 "context_window": 131072,
1166 "default_max_tokens": 8000,
1167 "can_reason": false,
1168 "supports_attachments": false,
1169 "options": {}
1170 },
1171 {
1172 "id": "xai/grok-3-mini-fast",
1173 "name": "Grok 3 Mini Fast Beta",
1174 "cost_per_1m_in": 0.6,
1175 "cost_per_1m_out": 4,
1176 "cost_per_1m_in_cached": 0,
1177 "cost_per_1m_out_cached": 0,
1178 "context_window": 131072,
1179 "default_max_tokens": 8000,
1180 "can_reason": false,
1181 "supports_attachments": false,
1182 "options": {}
1183 },
1184 {
1185 "id": "xai/grok-4",
1186 "name": "Grok 4",
1187 "cost_per_1m_in": 3,
1188 "cost_per_1m_out": 15,
1189 "cost_per_1m_in_cached": 0,
1190 "cost_per_1m_out_cached": 0,
1191 "context_window": 256000,
1192 "default_max_tokens": 8000,
1193 "can_reason": true,
1194 "reasoning_levels": [
1195 "low",
1196 "medium",
1197 "high"
1198 ],
1199 "default_reasoning_effort": "medium",
1200 "supports_attachments": true,
1201 "options": {}
1202 },
1203 {
1204 "id": "xai/grok-4-fast-non-reasoning",
1205 "name": "Grok 4 Fast Non-Reasoning",
1206 "cost_per_1m_in": 0.2,
1207 "cost_per_1m_out": 0.5,
1208 "cost_per_1m_in_cached": 0.05,
1209 "cost_per_1m_out_cached": 0,
1210 "context_window": 2000000,
1211 "default_max_tokens": 8000,
1212 "can_reason": false,
1213 "supports_attachments": false,
1214 "options": {}
1215 },
1216 {
1217 "id": "xai/grok-4-fast-reasoning",
1218 "name": "Grok 4 Fast Reasoning",
1219 "cost_per_1m_in": 0.2,
1220 "cost_per_1m_out": 0.5,
1221 "cost_per_1m_in_cached": 0.05,
1222 "cost_per_1m_out_cached": 0,
1223 "context_window": 2000000,
1224 "default_max_tokens": 8000,
1225 "can_reason": true,
1226 "reasoning_levels": [
1227 "low",
1228 "medium",
1229 "high"
1230 ],
1231 "default_reasoning_effort": "medium",
1232 "supports_attachments": false,
1233 "options": {}
1234 },
1235 {
1236 "id": "xai/grok-4.1-fast-non-reasoning",
1237 "name": "Grok 4.1 Fast Non-Reasoning",
1238 "cost_per_1m_in": 0.2,
1239 "cost_per_1m_out": 0.5,
1240 "cost_per_1m_in_cached": 0.05,
1241 "cost_per_1m_out_cached": 0,
1242 "context_window": 2000000,
1243 "default_max_tokens": 8000,
1244 "can_reason": false,
1245 "supports_attachments": false,
1246 "options": {}
1247 },
1248 {
1249 "id": "xai/grok-4.1-fast-reasoning",
1250 "name": "Grok 4.1 Fast Reasoning",
1251 "cost_per_1m_in": 0.2,
1252 "cost_per_1m_out": 0.5,
1253 "cost_per_1m_in_cached": 0.05,
1254 "cost_per_1m_out_cached": 0,
1255 "context_window": 2000000,
1256 "default_max_tokens": 8000,
1257 "can_reason": true,
1258 "reasoning_levels": [
1259 "low",
1260 "medium",
1261 "high"
1262 ],
1263 "default_reasoning_effort": "medium",
1264 "supports_attachments": false,
1265 "options": {}
1266 },
1267 {
1268 "id": "xai/grok-code-fast-1",
1269 "name": "Grok Code Fast 1",
1270 "cost_per_1m_in": 0.2,
1271 "cost_per_1m_out": 1.5,
1272 "cost_per_1m_in_cached": 0.02,
1273 "cost_per_1m_out_cached": 0,
1274 "context_window": 256000,
1275 "default_max_tokens": 8000,
1276 "can_reason": true,
1277 "reasoning_levels": [
1278 "low",
1279 "medium",
1280 "high"
1281 ],
1282 "default_reasoning_effort": "medium",
1283 "supports_attachments": false,
1284 "options": {}
1285 },
1286 {
1287 "id": "prime-intellect/intellect-3",
1288 "name": "INTELLECT 3",
1289 "cost_per_1m_in": 0.2,
1290 "cost_per_1m_out": 1.1,
1291 "cost_per_1m_in_cached": 0,
1292 "cost_per_1m_out_cached": 0,
1293 "context_window": 131072,
1294 "default_max_tokens": 8000,
1295 "can_reason": true,
1296 "reasoning_levels": [
1297 "low",
1298 "medium",
1299 "high"
1300 ],
1301 "default_reasoning_effort": "medium",
1302 "supports_attachments": false,
1303 "options": {}
1304 },
1305 {
1306 "id": "moonshotai/kimi-k2",
1307 "name": "Kimi K2",
1308 "cost_per_1m_in": 0.5,
1309 "cost_per_1m_out": 2,
1310 "cost_per_1m_in_cached": 0,
1311 "cost_per_1m_out_cached": 0,
1312 "context_window": 131072,
1313 "default_max_tokens": 8000,
1314 "can_reason": false,
1315 "supports_attachments": false,
1316 "options": {}
1317 },
1318 {
1319 "id": "moonshotai/kimi-k2-thinking",
1320 "name": "Kimi K2 Thinking",
1321 "cost_per_1m_in": 0.47,
1322 "cost_per_1m_out": 2,
1323 "cost_per_1m_in_cached": 0.14100000000000001,
1324 "cost_per_1m_out_cached": 0,
1325 "context_window": 216144,
1326 "default_max_tokens": 8000,
1327 "can_reason": true,
1328 "reasoning_levels": [
1329 "low",
1330 "medium",
1331 "high"
1332 ],
1333 "default_reasoning_effort": "medium",
1334 "supports_attachments": false,
1335 "options": {}
1336 },
1337 {
1338 "id": "moonshotai/kimi-k2-thinking-turbo",
1339 "name": "Kimi K2 Thinking Turbo",
1340 "cost_per_1m_in": 1.15,
1341 "cost_per_1m_out": 8,
1342 "cost_per_1m_in_cached": 0.15,
1343 "cost_per_1m_out_cached": 0,
1344 "context_window": 262114,
1345 "default_max_tokens": 8000,
1346 "can_reason": true,
1347 "reasoning_levels": [
1348 "low",
1349 "medium",
1350 "high"
1351 ],
1352 "default_reasoning_effort": "medium",
1353 "supports_attachments": false,
1354 "options": {}
1355 },
1356 {
1357 "id": "moonshotai/kimi-k2-turbo",
1358 "name": "Kimi K2 Turbo",
1359 "cost_per_1m_in": 2.4,
1360 "cost_per_1m_out": 10,
1361 "cost_per_1m_in_cached": 0,
1362 "cost_per_1m_out_cached": 0,
1363 "context_window": 256000,
1364 "default_max_tokens": 8000,
1365 "can_reason": false,
1366 "supports_attachments": false,
1367 "options": {}
1368 },
1369 {
1370 "id": "moonshotai/kimi-k2.5",
1371 "name": "Kimi K2.5",
1372 "cost_per_1m_in": 0.5,
1373 "cost_per_1m_out": 2.8,
1374 "cost_per_1m_in_cached": 0,
1375 "cost_per_1m_out_cached": 0,
1376 "context_window": 256000,
1377 "default_max_tokens": 8000,
1378 "can_reason": true,
1379 "reasoning_levels": [
1380 "low",
1381 "medium",
1382 "high"
1383 ],
1384 "default_reasoning_effort": "medium",
1385 "supports_attachments": true,
1386 "options": {}
1387 },
1388 {
1389 "id": "meta/llama-3.1-70b",
1390 "name": "Llama 3.1 70B Instruct",
1391 "cost_per_1m_in": 0.39999999999999997,
1392 "cost_per_1m_out": 0.39999999999999997,
1393 "cost_per_1m_in_cached": 0,
1394 "cost_per_1m_out_cached": 0,
1395 "context_window": 131072,
1396 "default_max_tokens": 8000,
1397 "can_reason": false,
1398 "supports_attachments": false,
1399 "options": {}
1400 },
1401 {
1402 "id": "meta/llama-3.1-8b",
1403 "name": "Llama 3.1 8B Instruct",
1404 "cost_per_1m_in": 0.03,
1405 "cost_per_1m_out": 0.049999999999999996,
1406 "cost_per_1m_in_cached": 0,
1407 "cost_per_1m_out_cached": 0,
1408 "context_window": 131072,
1409 "default_max_tokens": 8000,
1410 "can_reason": false,
1411 "supports_attachments": false,
1412 "options": {}
1413 },
1414 {
1415 "id": "meta/llama-3.2-11b",
1416 "name": "Llama 3.2 11B Vision Instruct",
1417 "cost_per_1m_in": 0.16,
1418 "cost_per_1m_out": 0.16,
1419 "cost_per_1m_in_cached": 0,
1420 "cost_per_1m_out_cached": 0,
1421 "context_window": 128000,
1422 "default_max_tokens": 8000,
1423 "can_reason": false,
1424 "supports_attachments": true,
1425 "options": {}
1426 },
1427 {
1428 "id": "meta/llama-3.2-90b",
1429 "name": "Llama 3.2 90B Vision Instruct",
1430 "cost_per_1m_in": 0.72,
1431 "cost_per_1m_out": 0.72,
1432 "cost_per_1m_in_cached": 0,
1433 "cost_per_1m_out_cached": 0,
1434 "context_window": 128000,
1435 "default_max_tokens": 8000,
1436 "can_reason": false,
1437 "supports_attachments": true,
1438 "options": {}
1439 },
1440 {
1441 "id": "meta/llama-3.3-70b",
1442 "name": "Llama 3.3 70B Instruct",
1443 "cost_per_1m_in": 0.72,
1444 "cost_per_1m_out": 0.72,
1445 "cost_per_1m_in_cached": 0,
1446 "cost_per_1m_out_cached": 0,
1447 "context_window": 128000,
1448 "default_max_tokens": 8000,
1449 "can_reason": false,
1450 "supports_attachments": false,
1451 "options": {}
1452 },
1453 {
1454 "id": "meta/llama-4-maverick",
1455 "name": "Llama 4 Maverick 17B Instruct",
1456 "cost_per_1m_in": 0.15,
1457 "cost_per_1m_out": 0.6,
1458 "cost_per_1m_in_cached": 0,
1459 "cost_per_1m_out_cached": 0,
1460 "context_window": 131072,
1461 "default_max_tokens": 8000,
1462 "can_reason": false,
1463 "supports_attachments": true,
1464 "options": {}
1465 },
1466 {
1467 "id": "meta/llama-4-scout",
1468 "name": "Llama 4 Scout 17B Instruct",
1469 "cost_per_1m_in": 0.08,
1470 "cost_per_1m_out": 0.3,
1471 "cost_per_1m_in_cached": 0,
1472 "cost_per_1m_out_cached": 0,
1473 "context_window": 131072,
1474 "default_max_tokens": 8000,
1475 "can_reason": false,
1476 "supports_attachments": true,
1477 "options": {}
1478 },
1479 {
1480 "id": "meituan/longcat-flash-chat",
1481 "name": "LongCat Flash Chat",
1482 "cost_per_1m_in": 0,
1483 "cost_per_1m_out": 0,
1484 "cost_per_1m_in_cached": 0,
1485 "cost_per_1m_out_cached": 0,
1486 "context_window": 128000,
1487 "default_max_tokens": 8000,
1488 "can_reason": false,
1489 "supports_attachments": false,
1490 "options": {}
1491 },
1492 {
1493 "id": "meituan/longcat-flash-thinking",
1494 "name": "LongCat Flash Thinking",
1495 "cost_per_1m_in": 0.15,
1496 "cost_per_1m_out": 1.5,
1497 "cost_per_1m_in_cached": 0,
1498 "cost_per_1m_out_cached": 0,
1499 "context_window": 128000,
1500 "default_max_tokens": 8000,
1501 "can_reason": true,
1502 "reasoning_levels": [
1503 "low",
1504 "medium",
1505 "high"
1506 ],
1507 "default_reasoning_effort": "medium",
1508 "supports_attachments": false,
1509 "options": {}
1510 },
1511 {
1512 "id": "inception/mercury-coder-small",
1513 "name": "Mercury Coder Small Beta",
1514 "cost_per_1m_in": 0.25,
1515 "cost_per_1m_out": 1,
1516 "cost_per_1m_in_cached": 0,
1517 "cost_per_1m_out_cached": 0,
1518 "context_window": 32000,
1519 "default_max_tokens": 8000,
1520 "can_reason": false,
1521 "supports_attachments": false,
1522 "options": {}
1523 },
1524 {
1525 "id": "xiaomi/mimo-v2-flash",
1526 "name": "MiMo V2 Flash",
1527 "cost_per_1m_in": 0.09,
1528 "cost_per_1m_out": 0.29,
1529 "cost_per_1m_in_cached": 0,
1530 "cost_per_1m_out_cached": 0,
1531 "context_window": 262144,
1532 "default_max_tokens": 8000,
1533 "can_reason": true,
1534 "reasoning_levels": [
1535 "low",
1536 "medium",
1537 "high"
1538 ],
1539 "default_reasoning_effort": "medium",
1540 "supports_attachments": false,
1541 "options": {}
1542 },
1543 {
1544 "id": "minimax/minimax-m2",
1545 "name": "MiniMax M2",
1546 "cost_per_1m_in": 0.3,
1547 "cost_per_1m_out": 1.2,
1548 "cost_per_1m_in_cached": 0.03,
1549 "cost_per_1m_out_cached": 0.375,
1550 "context_window": 205000,
1551 "default_max_tokens": 8000,
1552 "can_reason": true,
1553 "reasoning_levels": [
1554 "low",
1555 "medium",
1556 "high"
1557 ],
1558 "default_reasoning_effort": "medium",
1559 "supports_attachments": false,
1560 "options": {}
1561 },
1562 {
1563 "id": "minimax/minimax-m2.1",
1564 "name": "MiniMax M2.1",
1565 "cost_per_1m_in": 0.3,
1566 "cost_per_1m_out": 1.2,
1567 "cost_per_1m_in_cached": 0.15,
1568 "cost_per_1m_out_cached": 0,
1569 "context_window": 204800,
1570 "default_max_tokens": 8000,
1571 "can_reason": true,
1572 "reasoning_levels": [
1573 "low",
1574 "medium",
1575 "high"
1576 ],
1577 "default_reasoning_effort": "medium",
1578 "supports_attachments": false,
1579 "options": {}
1580 },
1581 {
1582 "id": "minimax/minimax-m2.1-lightning",
1583 "name": "MiniMax M2.1 Lightning",
1584 "cost_per_1m_in": 0.3,
1585 "cost_per_1m_out": 2.4,
1586 "cost_per_1m_in_cached": 0.03,
1587 "cost_per_1m_out_cached": 0.375,
1588 "context_window": 204800,
1589 "default_max_tokens": 8000,
1590 "can_reason": true,
1591 "reasoning_levels": [
1592 "low",
1593 "medium",
1594 "high"
1595 ],
1596 "default_reasoning_effort": "medium",
1597 "supports_attachments": false,
1598 "options": {}
1599 },
1600 {
1601 "id": "minimax/minimax-m2.5",
1602 "name": "MiniMax M2.5",
1603 "cost_per_1m_in": 0.3,
1604 "cost_per_1m_out": 1.2,
1605 "cost_per_1m_in_cached": 0.03,
1606 "cost_per_1m_out_cached": 0.375,
1607 "context_window": 204800,
1608 "default_max_tokens": 8000,
1609 "can_reason": true,
1610 "reasoning_levels": [
1611 "low",
1612 "medium",
1613 "high"
1614 ],
1615 "default_reasoning_effort": "medium",
1616 "supports_attachments": false,
1617 "options": {}
1618 },
1619 {
1620 "id": "mistral/ministral-3b",
1621 "name": "Ministral 3B",
1622 "cost_per_1m_in": 0.04,
1623 "cost_per_1m_out": 0.04,
1624 "cost_per_1m_in_cached": 0,
1625 "cost_per_1m_out_cached": 0,
1626 "context_window": 128000,
1627 "default_max_tokens": 4000,
1628 "can_reason": false,
1629 "supports_attachments": false,
1630 "options": {}
1631 },
1632 {
1633 "id": "mistral/ministral-8b",
1634 "name": "Ministral 8B",
1635 "cost_per_1m_in": 0.09999999999999999,
1636 "cost_per_1m_out": 0.09999999999999999,
1637 "cost_per_1m_in_cached": 0,
1638 "cost_per_1m_out_cached": 0,
1639 "context_window": 128000,
1640 "default_max_tokens": 4000,
1641 "can_reason": false,
1642 "supports_attachments": false,
1643 "options": {}
1644 },
1645 {
1646 "id": "mistral/codestral",
1647 "name": "Mistral Codestral",
1648 "cost_per_1m_in": 0.3,
1649 "cost_per_1m_out": 0.8999999999999999,
1650 "cost_per_1m_in_cached": 0,
1651 "cost_per_1m_out_cached": 0,
1652 "context_window": 128000,
1653 "default_max_tokens": 4000,
1654 "can_reason": false,
1655 "supports_attachments": false,
1656 "options": {}
1657 },
1658 {
1659 "id": "mistral/mistral-medium",
1660 "name": "Mistral Medium 3.1",
1661 "cost_per_1m_in": 0.39999999999999997,
1662 "cost_per_1m_out": 2,
1663 "cost_per_1m_in_cached": 0,
1664 "cost_per_1m_out_cached": 0,
1665 "context_window": 128000,
1666 "default_max_tokens": 8000,
1667 "can_reason": false,
1668 "supports_attachments": true,
1669 "options": {}
1670 },
1671 {
1672 "id": "mistral/mistral-small",
1673 "name": "Mistral Small",
1674 "cost_per_1m_in": 0.09999999999999999,
1675 "cost_per_1m_out": 0.3,
1676 "cost_per_1m_in_cached": 0,
1677 "cost_per_1m_out_cached": 0,
1678 "context_window": 32000,
1679 "default_max_tokens": 4000,
1680 "can_reason": false,
1681 "supports_attachments": true,
1682 "options": {}
1683 },
1684 {
1685 "id": "nvidia/nemotron-nano-12b-v2-vl",
1686 "name": "Nvidia Nemotron Nano 12B V2 VL",
1687 "cost_per_1m_in": 0.19999999999999998,
1688 "cost_per_1m_out": 0.6,
1689 "cost_per_1m_in_cached": 0,
1690 "cost_per_1m_out_cached": 0,
1691 "context_window": 131072,
1692 "default_max_tokens": 8000,
1693 "can_reason": true,
1694 "reasoning_levels": [
1695 "low",
1696 "medium",
1697 "high"
1698 ],
1699 "default_reasoning_effort": "medium",
1700 "supports_attachments": true,
1701 "options": {}
1702 },
1703 {
1704 "id": "nvidia/nemotron-nano-9b-v2",
1705 "name": "Nvidia Nemotron Nano 9B V2",
1706 "cost_per_1m_in": 0.04,
1707 "cost_per_1m_out": 0.16,
1708 "cost_per_1m_in_cached": 0,
1709 "cost_per_1m_out_cached": 0,
1710 "context_window": 131072,
1711 "default_max_tokens": 8000,
1712 "can_reason": true,
1713 "reasoning_levels": [
1714 "low",
1715 "medium",
1716 "high"
1717 ],
1718 "default_reasoning_effort": "medium",
1719 "supports_attachments": false,
1720 "options": {}
1721 },
1722 {
1723 "id": "mistral/pixtral-12b",
1724 "name": "Pixtral 12B 2409",
1725 "cost_per_1m_in": 0.15,
1726 "cost_per_1m_out": 0.15,
1727 "cost_per_1m_in_cached": 0,
1728 "cost_per_1m_out_cached": 0,
1729 "context_window": 128000,
1730 "default_max_tokens": 4000,
1731 "can_reason": false,
1732 "supports_attachments": true,
1733 "options": {}
1734 },
1735 {
1736 "id": "mistral/pixtral-large",
1737 "name": "Pixtral Large",
1738 "cost_per_1m_in": 2,
1739 "cost_per_1m_out": 6,
1740 "cost_per_1m_in_cached": 0,
1741 "cost_per_1m_out_cached": 0,
1742 "context_window": 128000,
1743 "default_max_tokens": 4000,
1744 "can_reason": false,
1745 "supports_attachments": true,
1746 "options": {}
1747 },
1748 {
1749 "id": "alibaba/qwen-3-32b",
1750 "name": "Qwen 3 32B",
1751 "cost_per_1m_in": 0.09999999999999999,
1752 "cost_per_1m_out": 0.3,
1753 "cost_per_1m_in_cached": 0,
1754 "cost_per_1m_out_cached": 0,
1755 "context_window": 40960,
1756 "default_max_tokens": 8000,
1757 "can_reason": true,
1758 "reasoning_levels": [
1759 "low",
1760 "medium",
1761 "high"
1762 ],
1763 "default_reasoning_effort": "medium",
1764 "supports_attachments": false,
1765 "options": {}
1766 },
1767 {
1768 "id": "alibaba/qwen3-coder-30b-a3b",
1769 "name": "Qwen 3 Coder 30B A3B Instruct",
1770 "cost_per_1m_in": 0.07,
1771 "cost_per_1m_out": 0.27,
1772 "cost_per_1m_in_cached": 0,
1773 "cost_per_1m_out_cached": 0,
1774 "context_window": 160000,
1775 "default_max_tokens": 8000,
1776 "can_reason": true,
1777 "reasoning_levels": [
1778 "low",
1779 "medium",
1780 "high"
1781 ],
1782 "default_reasoning_effort": "medium",
1783 "supports_attachments": false,
1784 "options": {}
1785 },
1786 {
1787 "id": "alibaba/qwen3-max-thinking",
1788 "name": "Qwen 3 Max Thinking",
1789 "cost_per_1m_in": 1.2,
1790 "cost_per_1m_out": 6,
1791 "cost_per_1m_in_cached": 0.24,
1792 "cost_per_1m_out_cached": 0,
1793 "context_window": 256000,
1794 "default_max_tokens": 8000,
1795 "can_reason": true,
1796 "reasoning_levels": [
1797 "low",
1798 "medium",
1799 "high"
1800 ],
1801 "default_reasoning_effort": "medium",
1802 "supports_attachments": false,
1803 "options": {}
1804 },
1805 {
1806 "id": "alibaba/qwen3.5-flash",
1807 "name": "Qwen 3.5 Flash",
1808 "cost_per_1m_in": 0.09999999999999999,
1809 "cost_per_1m_out": 0.39999999999999997,
1810 "cost_per_1m_in_cached": 0.001,
1811 "cost_per_1m_out_cached": 0.125,
1812 "context_window": 1000000,
1813 "default_max_tokens": 8000,
1814 "can_reason": true,
1815 "reasoning_levels": [
1816 "low",
1817 "medium",
1818 "high"
1819 ],
1820 "default_reasoning_effort": "medium",
1821 "supports_attachments": true,
1822 "options": {}
1823 },
1824 {
1825 "id": "alibaba/qwen3.5-plus",
1826 "name": "Qwen 3.5 Plus",
1827 "cost_per_1m_in": 0.39999999999999997,
1828 "cost_per_1m_out": 2.4,
1829 "cost_per_1m_in_cached": 0.04,
1830 "cost_per_1m_out_cached": 0.5,
1831 "context_window": 1000000,
1832 "default_max_tokens": 8000,
1833 "can_reason": true,
1834 "reasoning_levels": [
1835 "low",
1836 "medium",
1837 "high"
1838 ],
1839 "default_reasoning_effort": "medium",
1840 "supports_attachments": true,
1841 "options": {}
1842 },
1843 {
1844 "id": "alibaba/qwen3-235b-a22b-thinking",
1845 "name": "Qwen3 235B A22B Thinking 2507",
1846 "cost_per_1m_in": 0.3,
1847 "cost_per_1m_out": 2.9000000000000004,
1848 "cost_per_1m_in_cached": 0,
1849 "cost_per_1m_out_cached": 0,
1850 "context_window": 262114,
1851 "default_max_tokens": 8000,
1852 "can_reason": true,
1853 "reasoning_levels": [
1854 "low",
1855 "medium",
1856 "high"
1857 ],
1858 "default_reasoning_effort": "medium",
1859 "supports_attachments": true,
1860 "options": {}
1861 },
1862 {
1863 "id": "alibaba/qwen3-coder",
1864 "name": "Qwen3 Coder 480B A35B Instruct",
1865 "cost_per_1m_in": 0.39999999999999997,
1866 "cost_per_1m_out": 1.5999999999999999,
1867 "cost_per_1m_in_cached": 0,
1868 "cost_per_1m_out_cached": 0,
1869 "context_window": 262144,
1870 "default_max_tokens": 8000,
1871 "can_reason": false,
1872 "supports_attachments": false,
1873 "options": {}
1874 },
1875 {
1876 "id": "alibaba/qwen3-coder-next",
1877 "name": "Qwen3 Coder Next",
1878 "cost_per_1m_in": 0.5,
1879 "cost_per_1m_out": 1.2,
1880 "cost_per_1m_in_cached": 0,
1881 "cost_per_1m_out_cached": 0,
1882 "context_window": 256000,
1883 "default_max_tokens": 8000,
1884 "can_reason": true,
1885 "reasoning_levels": [
1886 "low",
1887 "medium",
1888 "high"
1889 ],
1890 "default_reasoning_effort": "medium",
1891 "supports_attachments": false,
1892 "options": {}
1893 },
1894 {
1895 "id": "alibaba/qwen3-coder-plus",
1896 "name": "Qwen3 Coder Plus",
1897 "cost_per_1m_in": 1,
1898 "cost_per_1m_out": 5,
1899 "cost_per_1m_in_cached": 0.19999999999999998,
1900 "cost_per_1m_out_cached": 0,
1901 "context_window": 1000000,
1902 "default_max_tokens": 8000,
1903 "can_reason": false,
1904 "supports_attachments": false,
1905 "options": {}
1906 },
1907 {
1908 "id": "alibaba/qwen3-max-preview",
1909 "name": "Qwen3 Max Preview",
1910 "cost_per_1m_in": 1.2,
1911 "cost_per_1m_out": 6,
1912 "cost_per_1m_in_cached": 0.24,
1913 "cost_per_1m_out_cached": 0,
1914 "context_window": 262144,
1915 "default_max_tokens": 8000,
1916 "can_reason": false,
1917 "supports_attachments": false,
1918 "options": {}
1919 },
1920 {
1921 "id": "alibaba/qwen3-vl-thinking",
1922 "name": "Qwen3 VL 235B A22B Thinking",
1923 "cost_per_1m_in": 0.22,
1924 "cost_per_1m_out": 0.88,
1925 "cost_per_1m_in_cached": 0,
1926 "cost_per_1m_out_cached": 0,
1927 "context_window": 256000,
1928 "default_max_tokens": 8000,
1929 "can_reason": true,
1930 "reasoning_levels": [
1931 "low",
1932 "medium",
1933 "high"
1934 ],
1935 "default_reasoning_effort": "medium",
1936 "supports_attachments": true,
1937 "options": {}
1938 },
1939 {
1940 "id": "alibaba/qwen-3-14b",
1941 "name": "Qwen3-14B",
1942 "cost_per_1m_in": 0.06,
1943 "cost_per_1m_out": 0.24,
1944 "cost_per_1m_in_cached": 0,
1945 "cost_per_1m_out_cached": 0,
1946 "context_window": 40960,
1947 "default_max_tokens": 8000,
1948 "can_reason": true,
1949 "reasoning_levels": [
1950 "low",
1951 "medium",
1952 "high"
1953 ],
1954 "default_reasoning_effort": "medium",
1955 "supports_attachments": false,
1956 "options": {}
1957 },
1958 {
1959 "id": "alibaba/qwen-3-235b",
1960 "name": "Qwen3-235B-A22B",
1961 "cost_per_1m_in": 0.071,
1962 "cost_per_1m_out": 0.463,
1963 "cost_per_1m_in_cached": 0,
1964 "cost_per_1m_out_cached": 0,
1965 "context_window": 40960,
1966 "default_max_tokens": 8000,
1967 "can_reason": false,
1968 "supports_attachments": false,
1969 "options": {}
1970 },
1971 {
1972 "id": "alibaba/qwen-3-30b",
1973 "name": "Qwen3-30B-A3B",
1974 "cost_per_1m_in": 0.08,
1975 "cost_per_1m_out": 0.29,
1976 "cost_per_1m_in_cached": 0,
1977 "cost_per_1m_out_cached": 0,
1978 "context_window": 40960,
1979 "default_max_tokens": 8000,
1980 "can_reason": true,
1981 "reasoning_levels": [
1982 "low",
1983 "medium",
1984 "high"
1985 ],
1986 "default_reasoning_effort": "medium",
1987 "supports_attachments": false,
1988 "options": {}
1989 },
1990 {
1991 "id": "bytedance/seed-1.6",
1992 "name": "Seed 1.6",
1993 "cost_per_1m_in": 0.25,
1994 "cost_per_1m_out": 2,
1995 "cost_per_1m_in_cached": 0.049999999999999996,
1996 "cost_per_1m_out_cached": 0,
1997 "context_window": 256000,
1998 "default_max_tokens": 8000,
1999 "can_reason": true,
2000 "reasoning_levels": [
2001 "low",
2002 "medium",
2003 "high"
2004 ],
2005 "default_reasoning_effort": "medium",
2006 "supports_attachments": false,
2007 "options": {}
2008 },
2009 {
2010 "id": "perplexity/sonar",
2011 "name": "Sonar",
2012 "cost_per_1m_in": 1,
2013 "cost_per_1m_out": 1,
2014 "cost_per_1m_in_cached": 0,
2015 "cost_per_1m_out_cached": 0,
2016 "context_window": 127000,
2017 "default_max_tokens": 8000,
2018 "can_reason": false,
2019 "supports_attachments": true,
2020 "options": {}
2021 },
2022 {
2023 "id": "perplexity/sonar-pro",
2024 "name": "Sonar Pro",
2025 "cost_per_1m_in": 3,
2026 "cost_per_1m_out": 15,
2027 "cost_per_1m_in_cached": 0,
2028 "cost_per_1m_out_cached": 0,
2029 "context_window": 200000,
2030 "default_max_tokens": 8000,
2031 "can_reason": false,
2032 "supports_attachments": true,
2033 "options": {}
2034 },
2035 {
2036 "id": "arcee-ai/trinity-large-preview",
2037 "name": "Trinity Large Preview",
2038 "cost_per_1m_in": 0.25,
2039 "cost_per_1m_out": 1,
2040 "cost_per_1m_in_cached": 0,
2041 "cost_per_1m_out_cached": 0,
2042 "context_window": 131000,
2043 "default_max_tokens": 8000,
2044 "can_reason": false,
2045 "supports_attachments": false,
2046 "options": {}
2047 },
2048 {
2049 "id": "openai/gpt-oss-120b",
2050 "name": "gpt-oss-120b",
2051 "cost_per_1m_in": 0.09999999999999999,
2052 "cost_per_1m_out": 0.5,
2053 "cost_per_1m_in_cached": 0,
2054 "cost_per_1m_out_cached": 0,
2055 "context_window": 131072,
2056 "default_max_tokens": 8000,
2057 "can_reason": true,
2058 "reasoning_levels": [
2059 "low",
2060 "medium",
2061 "high"
2062 ],
2063 "default_reasoning_effort": "medium",
2064 "supports_attachments": false,
2065 "options": {}
2066 },
2067 {
2068 "id": "openai/gpt-oss-20b",
2069 "name": "gpt-oss-20b",
2070 "cost_per_1m_in": 0.07,
2071 "cost_per_1m_out": 0.3,
2072 "cost_per_1m_in_cached": 0,
2073 "cost_per_1m_out_cached": 0,
2074 "context_window": 128000,
2075 "default_max_tokens": 8000,
2076 "can_reason": true,
2077 "reasoning_levels": [
2078 "low",
2079 "medium",
2080 "high"
2081 ],
2082 "default_reasoning_effort": "medium",
2083 "supports_attachments": false,
2084 "options": {}
2085 },
2086 {
2087 "id": "openai/gpt-oss-safeguard-20b",
2088 "name": "gpt-oss-safeguard-20b",
2089 "cost_per_1m_in": 0.075,
2090 "cost_per_1m_out": 0.3,
2091 "cost_per_1m_in_cached": 0.037,
2092 "cost_per_1m_out_cached": 0,
2093 "context_window": 131072,
2094 "default_max_tokens": 8000,
2095 "can_reason": true,
2096 "reasoning_levels": [
2097 "low",
2098 "medium",
2099 "high"
2100 ],
2101 "default_reasoning_effort": "medium",
2102 "supports_attachments": false,
2103 "options": {}
2104 },
2105 {
2106 "id": "openai/o1",
2107 "name": "o1",
2108 "cost_per_1m_in": 15,
2109 "cost_per_1m_out": 60,
2110 "cost_per_1m_in_cached": 7.5,
2111 "cost_per_1m_out_cached": 0,
2112 "context_window": 200000,
2113 "default_max_tokens": 8000,
2114 "can_reason": true,
2115 "reasoning_levels": [
2116 "low",
2117 "medium",
2118 "high"
2119 ],
2120 "default_reasoning_effort": "medium",
2121 "supports_attachments": true,
2122 "options": {}
2123 },
2124 {
2125 "id": "openai/o3",
2126 "name": "o3",
2127 "cost_per_1m_in": 2,
2128 "cost_per_1m_out": 8,
2129 "cost_per_1m_in_cached": 0.5,
2130 "cost_per_1m_out_cached": 0,
2131 "context_window": 200000,
2132 "default_max_tokens": 8000,
2133 "can_reason": true,
2134 "reasoning_levels": [
2135 "low",
2136 "medium",
2137 "high"
2138 ],
2139 "default_reasoning_effort": "medium",
2140 "supports_attachments": true,
2141 "options": {}
2142 },
2143 {
2144 "id": "openai/o3-pro",
2145 "name": "o3 Pro",
2146 "cost_per_1m_in": 20,
2147 "cost_per_1m_out": 80,
2148 "cost_per_1m_in_cached": 0,
2149 "cost_per_1m_out_cached": 0,
2150 "context_window": 200000,
2151 "default_max_tokens": 8000,
2152 "can_reason": true,
2153 "reasoning_levels": [
2154 "low",
2155 "medium",
2156 "high"
2157 ],
2158 "default_reasoning_effort": "medium",
2159 "supports_attachments": true,
2160 "options": {}
2161 },
2162 {
2163 "id": "openai/o3-deep-research",
2164 "name": "o3-deep-research",
2165 "cost_per_1m_in": 10,
2166 "cost_per_1m_out": 40,
2167 "cost_per_1m_in_cached": 2.5,
2168 "cost_per_1m_out_cached": 0,
2169 "context_window": 200000,
2170 "default_max_tokens": 8000,
2171 "can_reason": true,
2172 "reasoning_levels": [
2173 "low",
2174 "medium",
2175 "high"
2176 ],
2177 "default_reasoning_effort": "medium",
2178 "supports_attachments": true,
2179 "options": {}
2180 },
2181 {
2182 "id": "openai/o3-mini",
2183 "name": "o3-mini",
2184 "cost_per_1m_in": 1.1,
2185 "cost_per_1m_out": 4.4,
2186 "cost_per_1m_in_cached": 0.55,
2187 "cost_per_1m_out_cached": 0,
2188 "context_window": 200000,
2189 "default_max_tokens": 8000,
2190 "can_reason": true,
2191 "reasoning_levels": [
2192 "low",
2193 "medium",
2194 "high"
2195 ],
2196 "default_reasoning_effort": "medium",
2197 "supports_attachments": false,
2198 "options": {}
2199 },
2200 {
2201 "id": "openai/o4-mini",
2202 "name": "o4-mini",
2203 "cost_per_1m_in": 1.1,
2204 "cost_per_1m_out": 4.4,
2205 "cost_per_1m_in_cached": 0.275,
2206 "cost_per_1m_out_cached": 0,
2207 "context_window": 200000,
2208 "default_max_tokens": 8000,
2209 "can_reason": true,
2210 "reasoning_levels": [
2211 "low",
2212 "medium",
2213 "high"
2214 ],
2215 "default_reasoning_effort": "medium",
2216 "supports_attachments": true,
2217 "options": {}
2218 },
2219 {
2220 "id": "vercel/v0-1.0-md",
2221 "name": "v0-1.0-md",
2222 "cost_per_1m_in": 3,
2223 "cost_per_1m_out": 15,
2224 "cost_per_1m_in_cached": 0,
2225 "cost_per_1m_out_cached": 0,
2226 "context_window": 128000,
2227 "default_max_tokens": 8000,
2228 "can_reason": false,
2229 "supports_attachments": true,
2230 "options": {}
2231 },
2232 {
2233 "id": "vercel/v0-1.5-md",
2234 "name": "v0-1.5-md",
2235 "cost_per_1m_in": 3,
2236 "cost_per_1m_out": 15,
2237 "cost_per_1m_in_cached": 0,
2238 "cost_per_1m_out_cached": 0,
2239 "context_window": 128000,
2240 "default_max_tokens": 8000,
2241 "can_reason": false,
2242 "supports_attachments": true,
2243 "options": {}
2244 }
2245 ],
2246 "default_headers": {
2247 "HTTP-Referer": "https://charm.land",
2248 "X-Title": "Crush"
2249 }
2250}