1{
2 "name": "Cortecs",
3 "id": "cortecs",
4 "api_key": "$CORTECS_API_KEY",
5 "api_endpoint": "https://api.cortecs.ai/v1",
6 "type": "openai",
7 "default_large_model_id": "qwen3-coder-30b-a3b-instruct",
8 "default_small_model_id": "glm-4.7-flash",
9 "models": [
10 {
11 "id": "kimi-k2.6",
12 "name": "Kimi K2.6",
13 "cost_per_1m_in": 0.6936,
14 "cost_per_1m_out": 3.0345,
15 "cost_per_1m_in_cached": 0,
16 "cost_per_1m_out_cached": 0,
17 "context_window": 256000,
18 "default_max_tokens": 25600,
19 "can_reason": true,
20 "reasoning_levels": [
21 "low",
22 "medium",
23 "high"
24 ],
25 "default_reasoning_effort": "medium",
26 "supports_attachments": false
27 },
28 {
29 "id": "claude-opus4-7",
30 "name": "Claude Opus 4.7",
31 "cost_per_1m_in": 4.7685,
32 "cost_per_1m_out": 23.8425,
33 "cost_per_1m_in_cached": 0,
34 "cost_per_1m_out_cached": 0,
35 "context_window": 1000000,
36 "default_max_tokens": 100000,
37 "can_reason": true,
38 "reasoning_levels": [
39 "low",
40 "medium",
41 "high"
42 ],
43 "default_reasoning_effort": "medium",
44 "supports_attachments": true
45 },
46 {
47 "id": "minimax-m2.7",
48 "name": "MiniMax M2.7",
49 "cost_per_1m_in": 0.26622,
50 "cost_per_1m_out": 1.06488,
51 "cost_per_1m_in_cached": 0,
52 "cost_per_1m_out_cached": 0,
53 "context_window": 196608,
54 "default_max_tokens": 19660,
55 "can_reason": true,
56 "reasoning_levels": [
57 "low",
58 "medium",
59 "high"
60 ],
61 "default_reasoning_effort": "medium",
62 "supports_attachments": false
63 },
64 {
65 "id": "glm-5.1",
66 "name": "GLM 5.1",
67 "cost_per_1m_in": 1.24236,
68 "cost_per_1m_out": 3.90336,
69 "cost_per_1m_in_cached": 0,
70 "cost_per_1m_out_cached": 0,
71 "context_window": 202752,
72 "default_max_tokens": 20275,
73 "can_reason": true,
74 "reasoning_levels": [
75 "low",
76 "medium",
77 "high"
78 ],
79 "default_reasoning_effort": "medium",
80 "supports_attachments": false
81 },
82 {
83 "id": "qwen3.5-122b-a10b",
84 "name": "Qwen3.5 122B A10B",
85 "cost_per_1m_in": 0.4437,
86 "cost_per_1m_out": 3.1059,
87 "cost_per_1m_in_cached": 0,
88 "cost_per_1m_out_cached": 0,
89 "context_window": 262144,
90 "default_max_tokens": 26214,
91 "can_reason": true,
92 "reasoning_levels": [
93 "low",
94 "medium",
95 "high"
96 ],
97 "default_reasoning_effort": "medium",
98 "supports_attachments": false
99 },
100 {
101 "id": "qwen3.5-9b",
102 "name": "Qwen3.5 9B",
103 "cost_per_1m_in": 0.1,
104 "cost_per_1m_out": 0.15,
105 "cost_per_1m_in_cached": 0,
106 "cost_per_1m_out_cached": 0,
107 "context_window": 262000,
108 "default_max_tokens": 26200,
109 "can_reason": true,
110 "reasoning_levels": [
111 "low",
112 "medium",
113 "high"
114 ],
115 "default_reasoning_effort": "medium",
116 "supports_attachments": false
117 },
118 {
119 "id": "nemotron-3-super-120b-a12b",
120 "name": "Nemotron 3 Super 120B A12B",
121 "cost_per_1m_in": 0.15606,
122 "cost_per_1m_out": 0.67626,
123 "cost_per_1m_in_cached": 0,
124 "cost_per_1m_out_cached": 0,
125 "context_window": 262000,
126 "default_max_tokens": 26214,
127 "can_reason": true,
128 "reasoning_levels": [
129 "low",
130 "medium",
131 "high"
132 ],
133 "default_reasoning_effort": "medium",
134 "supports_attachments": false
135 },
136 {
137 "id": "qwen3-coder-next",
138 "name": "Qwen3 Coder Next",
139 "cost_per_1m_in": 0.15,
140 "cost_per_1m_out": 0.8,
141 "cost_per_1m_in_cached": 0,
142 "cost_per_1m_out_cached": 0,
143 "context_window": 256000,
144 "default_max_tokens": 25600,
145 "can_reason": true,
146 "reasoning_levels": [
147 "low",
148 "medium",
149 "high"
150 ],
151 "default_reasoning_effort": "medium",
152 "supports_attachments": false
153 },
154 {
155 "id": "glm-5",
156 "name": "GLM 5",
157 "cost_per_1m_in": 0.8874,
158 "cost_per_1m_out": 2.83968,
159 "cost_per_1m_in_cached": 0,
160 "cost_per_1m_out_cached": 0,
161 "context_window": 202752,
162 "default_max_tokens": 20275,
163 "can_reason": true,
164 "reasoning_levels": [
165 "low",
166 "medium",
167 "high"
168 ],
169 "default_reasoning_effort": "medium",
170 "supports_attachments": false
171 },
172 {
173 "id": "glm-4.6",
174 "name": "GLM 4.6",
175 "cost_per_1m_in": 0.35496,
176 "cost_per_1m_out": 1.55295,
177 "cost_per_1m_in_cached": 0,
178 "cost_per_1m_out_cached": 0,
179 "context_window": 203000,
180 "default_max_tokens": 20300,
181 "can_reason": true,
182 "reasoning_levels": [
183 "low",
184 "medium",
185 "high"
186 ],
187 "default_reasoning_effort": "medium",
188 "supports_attachments": false
189 },
190 {
191 "id": "deepseek-chat-v3.1",
192 "name": "DeepSeek Chat V3.1",
193 "cost_per_1m_in": 0.17748,
194 "cost_per_1m_out": 0.70992,
195 "cost_per_1m_in_cached": 0,
196 "cost_per_1m_out_cached": 0,
197 "context_window": 164000,
198 "default_max_tokens": 16400,
199 "can_reason": true,
200 "reasoning_levels": [
201 "low",
202 "medium",
203 "high"
204 ],
205 "default_reasoning_effort": "medium",
206 "supports_attachments": false
207 },
208 {
209 "id": "qwen-2.5-72b-instruct",
210 "name": "Qwen2.5 72B Instruct",
211 "cost_per_1m_in": 0.062118,
212 "cost_per_1m_out": 0.230724,
213 "cost_per_1m_in_cached": 0,
214 "cost_per_1m_out_cached": 0,
215 "context_window": 33000,
216 "default_max_tokens": 3300,
217 "can_reason": false,
218 "supports_attachments": false
219 },
220 {
221 "id": "qwen3.5-397b-a17b",
222 "name": "Qwen3.5 397B A17B",
223 "cost_per_1m_in": 0.53244,
224 "cost_per_1m_out": 3.19464,
225 "cost_per_1m_in_cached": 0,
226 "cost_per_1m_out_cached": 0,
227 "context_window": 262000,
228 "default_max_tokens": 25000,
229 "can_reason": true,
230 "reasoning_levels": [
231 "low",
232 "medium",
233 "high"
234 ],
235 "default_reasoning_effort": "medium",
236 "supports_attachments": false
237 },
238 {
239 "id": "deepseek-v3.2",
240 "name": "DeepSeek V3.2",
241 "cost_per_1m_in": 0.26622,
242 "cost_per_1m_out": 0.4437,
243 "cost_per_1m_in_cached": 0,
244 "cost_per_1m_out_cached": 0,
245 "context_window": 163840,
246 "default_max_tokens": 16384,
247 "can_reason": true,
248 "reasoning_levels": [
249 "low",
250 "medium",
251 "high"
252 ],
253 "default_reasoning_effort": "medium",
254 "supports_attachments": false
255 },
256 {
257 "id": "mistral-small-2603",
258 "name": "Mistral Small 4 2603",
259 "cost_per_1m_in": 0.1275,
260 "cost_per_1m_out": 0.51,
261 "cost_per_1m_in_cached": 0,
262 "cost_per_1m_out_cached": 0,
263 "context_window": 256000,
264 "default_max_tokens": 25600,
265 "can_reason": true,
266 "reasoning_levels": [
267 "low",
268 "medium",
269 "high"
270 ],
271 "default_reasoning_effort": "medium",
272 "supports_attachments": true
273 },
274 {
275 "id": "minimax-m2.5",
276 "name": "MiniMax M2.5",
277 "cost_per_1m_in": 0.26622,
278 "cost_per_1m_out": 0.97614,
279 "cost_per_1m_in_cached": 0,
280 "cost_per_1m_out_cached": 0,
281 "context_window": 196608,
282 "default_max_tokens": 19660,
283 "can_reason": true,
284 "reasoning_levels": [
285 "low",
286 "medium",
287 "high"
288 ],
289 "default_reasoning_effort": "medium",
290 "supports_attachments": false
291 },
292 {
293 "id": "claude-4-6-sonnet",
294 "name": "Claude Sonnet 4.6",
295 "cost_per_1m_in": 2.8691,
296 "cost_per_1m_out": 14.3095,
297 "cost_per_1m_in_cached": 0,
298 "cost_per_1m_out_cached": 0,
299 "context_window": 1000000,
300 "default_max_tokens": 100000,
301 "can_reason": true,
302 "reasoning_levels": [
303 "low",
304 "medium",
305 "high"
306 ],
307 "default_reasoning_effort": "medium",
308 "supports_attachments": true
309 },
310 {
311 "id": "glm-4.7-flash",
312 "name": "GLM 4.7 Flash",
313 "cost_per_1m_in": 0.0716,
314 "cost_per_1m_out": 0.4293,
315 "cost_per_1m_in_cached": 0,
316 "cost_per_1m_out_cached": 0,
317 "context_window": 203000,
318 "default_max_tokens": 20300,
319 "can_reason": false,
320 "supports_attachments": false
321 },
322 {
323 "id": "kimi-k2.5",
324 "name": "Kimi K2.5",
325 "cost_per_1m_in": 0.4437,
326 "cost_per_1m_out": 2.12976,
327 "cost_per_1m_in_cached": 0,
328 "cost_per_1m_out_cached": 0,
329 "context_window": 256000,
330 "default_max_tokens": 25600,
331 "can_reason": true,
332 "reasoning_levels": [
333 "low",
334 "medium",
335 "high"
336 ],
337 "default_reasoning_effort": "medium",
338 "supports_attachments": false
339 },
340 {
341 "id": "claude-opus4-6",
342 "name": "Claude Opus 4.6",
343 "cost_per_1m_in": 4.7685,
344 "cost_per_1m_out": 23.8425,
345 "cost_per_1m_in_cached": 0,
346 "cost_per_1m_out_cached": 0,
347 "context_window": 1000000,
348 "default_max_tokens": 100000,
349 "can_reason": true,
350 "reasoning_levels": [
351 "low",
352 "medium",
353 "high"
354 ],
355 "default_reasoning_effort": "medium",
356 "supports_attachments": true
357 },
358 {
359 "id": "minimax-m2",
360 "name": "MiniMax M2",
361 "cost_per_1m_in": 0.22185,
362 "cost_per_1m_out": 0.8874,
363 "cost_per_1m_in_cached": 0,
364 "cost_per_1m_out_cached": 0,
365 "context_window": 196608,
366 "default_max_tokens": 19660,
367 "can_reason": true,
368 "reasoning_levels": [
369 "low",
370 "medium",
371 "high"
372 ],
373 "default_reasoning_effort": "medium",
374 "supports_attachments": false
375 },
376 {
377 "id": "glm-4.7",
378 "name": "GLM 4.7",
379 "cost_per_1m_in": 0.53244,
380 "cost_per_1m_out": 1.95228,
381 "cost_per_1m_in_cached": 0,
382 "cost_per_1m_out_cached": 0,
383 "context_window": 202752,
384 "default_max_tokens": 20275,
385 "can_reason": true,
386 "reasoning_levels": [
387 "low",
388 "medium",
389 "high"
390 ],
391 "default_reasoning_effort": "medium",
392 "supports_attachments": false
393 },
394 {
395 "id": "minimax-m2.1",
396 "name": "MiniMax M2.1",
397 "cost_per_1m_in": 0.322,
398 "cost_per_1m_out": 1.2879,
399 "cost_per_1m_in_cached": 0,
400 "cost_per_1m_out_cached": 0,
401 "context_window": 196000,
402 "default_max_tokens": 19600,
403 "can_reason": true,
404 "reasoning_levels": [
405 "low",
406 "medium",
407 "high"
408 ],
409 "default_reasoning_effort": "medium",
410 "supports_attachments": false
411 },
412 {
413 "id": "qwen3-vl-235b-a22b",
414 "name": "Qwen3 VL 235B A22B",
415 "cost_per_1m_in": 0.186354,
416 "cost_per_1m_out": 1.68606,
417 "cost_per_1m_in_cached": 0,
418 "cost_per_1m_out_cached": 0,
419 "context_window": 131000,
420 "default_max_tokens": 13100,
421 "can_reason": true,
422 "reasoning_levels": [
423 "low",
424 "medium",
425 "high"
426 ],
427 "default_reasoning_effort": "medium",
428 "supports_attachments": true
429 },
430 {
431 "id": "mistral-small-creative",
432 "name": "Mistral Small Creative",
433 "cost_per_1m_in": 0.1,
434 "cost_per_1m_out": 0.3,
435 "cost_per_1m_in_cached": 0,
436 "cost_per_1m_out_cached": 0,
437 "context_window": 32000,
438 "default_max_tokens": 3200,
439 "can_reason": false,
440 "supports_attachments": false
441 },
442 {
443 "id": "nvidia-nemotron-3-nano-30b-a3b",
444 "name": "Nemotron 3 Nano 30B A3B",
445 "cost_per_1m_in": 0.0537,
446 "cost_per_1m_out": 0.215,
447 "cost_per_1m_in_cached": 0,
448 "cost_per_1m_out_cached": 0,
449 "context_window": 128000,
450 "default_max_tokens": 12800,
451 "can_reason": true,
452 "reasoning_levels": [
453 "low",
454 "medium",
455 "high"
456 ],
457 "default_reasoning_effort": "medium",
458 "supports_attachments": false
459 },
460 {
461 "id": "claude-opus4-5",
462 "name": "Claude Opus 4.5",
463 "cost_per_1m_in": 4.7695,
464 "cost_per_1m_out": 23.8485,
465 "cost_per_1m_in_cached": 0,
466 "cost_per_1m_out_cached": 0,
467 "context_window": 200000,
468 "default_max_tokens": 20000,
469 "can_reason": true,
470 "reasoning_levels": [
471 "low",
472 "medium",
473 "high"
474 ],
475 "default_reasoning_effort": "medium",
476 "supports_attachments": true
477 },
478 {
479 "id": "qwen3-next-80b-a3b-thinking",
480 "name": "Qwen3 Next 80B A3B Thinking",
481 "cost_per_1m_in": 0.13311,
482 "cost_per_1m_out": 1.06488,
483 "cost_per_1m_in_cached": 0,
484 "cost_per_1m_out_cached": 0,
485 "context_window": 262000,
486 "default_max_tokens": 12800,
487 "can_reason": true,
488 "reasoning_levels": [
489 "low",
490 "medium",
491 "high"
492 ],
493 "default_reasoning_effort": "medium",
494 "supports_attachments": false
495 },
496 {
497 "id": "holo2-30b-a3b",
498 "name": "Holo2 30B A3B",
499 "cost_per_1m_in": 0.3,
500 "cost_per_1m_out": 0.7,
501 "cost_per_1m_in_cached": 0,
502 "cost_per_1m_out_cached": 0,
503 "context_window": 22000,
504 "default_max_tokens": 2200,
505 "can_reason": true,
506 "reasoning_levels": [
507 "low",
508 "medium",
509 "high"
510 ],
511 "default_reasoning_effort": "medium",
512 "supports_attachments": true
513 },
514 {
515 "id": "devstral-2512",
516 "name": "Devstral 2 2512",
517 "cost_per_1m_in": 0.4,
518 "cost_per_1m_out": 2,
519 "cost_per_1m_in_cached": 0,
520 "cost_per_1m_out_cached": 0,
521 "context_window": 262000,
522 "default_max_tokens": 26200,
523 "can_reason": false,
524 "supports_attachments": false
525 },
526 {
527 "id": "nova-2-lite",
528 "name": "Nova 2 Lite",
529 "cost_per_1m_in": 0.335,
530 "cost_per_1m_out": 2.822,
531 "cost_per_1m_in_cached": 0,
532 "cost_per_1m_out_cached": 0,
533 "context_window": 1000000,
534 "default_max_tokens": 100000,
535 "can_reason": true,
536 "reasoning_levels": [
537 "low",
538 "medium",
539 "high"
540 ],
541 "default_reasoning_effort": "medium",
542 "supports_attachments": true
543 },
544 {
545 "id": "gpt-oss-safeguard-120b",
546 "name": "GPT OSS Safeguard 120B",
547 "cost_per_1m_in": 0.161,
548 "cost_per_1m_out": 0.626,
549 "cost_per_1m_in_cached": 0,
550 "cost_per_1m_out_cached": 0,
551 "context_window": 128000,
552 "default_max_tokens": 12800,
553 "can_reason": true,
554 "reasoning_levels": [
555 "low",
556 "medium",
557 "high"
558 ],
559 "default_reasoning_effort": "medium",
560 "supports_attachments": false
561 },
562 {
563 "id": "mistral-large-2512",
564 "name": "Mistral Large 3 2512",
565 "cost_per_1m_in": 0.5,
566 "cost_per_1m_out": 1.5,
567 "cost_per_1m_in_cached": 0,
568 "cost_per_1m_out_cached": 0,
569 "context_window": 256000,
570 "default_max_tokens": 25600,
571 "can_reason": false,
572 "supports_attachments": true
573 },
574 {
575 "id": "ministral-8b-2512",
576 "name": "Ministral 3 8b 2512",
577 "cost_per_1m_in": 0.15,
578 "cost_per_1m_out": 0.15,
579 "cost_per_1m_in_cached": 0,
580 "cost_per_1m_out_cached": 0,
581 "context_window": 256000,
582 "default_max_tokens": 25600,
583 "can_reason": false,
584 "supports_attachments": true
585 },
586 {
587 "id": "ministral-3b-2512",
588 "name": "Ministral 3 3b 2512",
589 "cost_per_1m_in": 0.1,
590 "cost_per_1m_out": 0.1,
591 "cost_per_1m_in_cached": 0,
592 "cost_per_1m_out_cached": 0,
593 "context_window": 256000,
594 "default_max_tokens": 25600,
595 "can_reason": false,
596 "supports_attachments": true
597 },
598 {
599 "id": "ministral-14b-2512",
600 "name": "Ministral 3 14b 2512",
601 "cost_per_1m_in": 0.2,
602 "cost_per_1m_out": 0.2,
603 "cost_per_1m_in_cached": 0,
604 "cost_per_1m_out_cached": 0,
605 "context_window": 256000,
606 "default_max_tokens": 25600,
607 "can_reason": false,
608 "supports_attachments": true
609 },
610 {
611 "id": "intellect-3",
612 "name": "INTELLECT-3",
613 "cost_per_1m_in": 0.179,
614 "cost_per_1m_out": 0.984,
615 "cost_per_1m_in_cached": 0,
616 "cost_per_1m_out_cached": 0,
617 "context_window": 128000,
618 "default_max_tokens": 12800,
619 "can_reason": true,
620 "reasoning_levels": [
621 "low",
622 "medium",
623 "high"
624 ],
625 "default_reasoning_effort": "medium",
626 "supports_attachments": false
627 },
628 {
629 "id": "gpt-5.1",
630 "name": "GPT 5.1",
631 "cost_per_1m_in": 1.234,
632 "cost_per_1m_out": 9.838,
633 "cost_per_1m_in_cached": 0,
634 "cost_per_1m_out_cached": 0,
635 "context_window": 400000,
636 "default_max_tokens": 40000,
637 "can_reason": true,
638 "reasoning_levels": [
639 "low",
640 "medium",
641 "high"
642 ],
643 "default_reasoning_effort": "medium",
644 "supports_attachments": true
645 },
646 {
647 "id": "nemotron-nano-v2-12b",
648 "name": "Nemotron Nano V2 12b",
649 "cost_per_1m_in": 0.215,
650 "cost_per_1m_out": 0.635,
651 "cost_per_1m_in_cached": 0,
652 "cost_per_1m_out_cached": 0,
653 "context_window": 128000,
654 "default_max_tokens": 12800,
655 "can_reason": true,
656 "reasoning_levels": [
657 "low",
658 "medium",
659 "high"
660 ],
661 "default_reasoning_effort": "medium",
662 "supports_attachments": true
663 },
664 {
665 "id": "claude-haiku-4-5",
666 "name": "Claude Haiku 4.5",
667 "cost_per_1m_in": 0.894,
668 "cost_per_1m_out": 4.472,
669 "cost_per_1m_in_cached": 0,
670 "cost_per_1m_out_cached": 0,
671 "context_window": 200000,
672 "default_max_tokens": 20000,
673 "can_reason": true,
674 "reasoning_levels": [
675 "low",
676 "medium",
677 "high"
678 ],
679 "default_reasoning_effort": "medium",
680 "supports_attachments": true
681 },
682 {
683 "id": "claude-4-5-sonnet",
684 "name": "Claude 4.5 Sonnet",
685 "cost_per_1m_in": 2.683,
686 "cost_per_1m_out": 13.416,
687 "cost_per_1m_in_cached": 0,
688 "cost_per_1m_out_cached": 0,
689 "context_window": 200000,
690 "default_max_tokens": 20000,
691 "can_reason": true,
692 "reasoning_levels": [
693 "low",
694 "medium",
695 "high"
696 ],
697 "default_reasoning_effort": "medium",
698 "supports_attachments": true
699 },
700 {
701 "id": "magistral-medium-2509",
702 "name": "Magistral Medium 2509",
703 "cost_per_1m_in": 2,
704 "cost_per_1m_out": 5,
705 "cost_per_1m_in_cached": 0,
706 "cost_per_1m_out_cached": 0,
707 "context_window": 128000,
708 "default_max_tokens": 12800,
709 "can_reason": true,
710 "reasoning_levels": [
711 "low",
712 "medium",
713 "high"
714 ],
715 "default_reasoning_effort": "medium",
716 "supports_attachments": true
717 },
718 {
719 "id": "magistral-small-2509",
720 "name": "Magistral Small 2509",
721 "cost_per_1m_in": 0.5,
722 "cost_per_1m_out": 1.5,
723 "cost_per_1m_in_cached": 0,
724 "cost_per_1m_out_cached": 0,
725 "context_window": 128000,
726 "default_max_tokens": 12800,
727 "can_reason": true,
728 "reasoning_levels": [
729 "low",
730 "medium",
731 "high"
732 ],
733 "default_reasoning_effort": "medium",
734 "supports_attachments": true
735 },
736 {
737 "id": "hermes-4-70b",
738 "name": "Hermes 4 70B",
739 "cost_per_1m_in": 0.116,
740 "cost_per_1m_out": 0.358,
741 "cost_per_1m_in_cached": 0,
742 "cost_per_1m_out_cached": 0,
743 "context_window": 128000,
744 "default_max_tokens": 12800,
745 "can_reason": false,
746 "supports_attachments": false
747 },
748 {
749 "id": "gpt-5",
750 "name": "GPT 5",
751 "cost_per_1m_in": 1.234,
752 "cost_per_1m_out": 9.838,
753 "cost_per_1m_in_cached": 0,
754 "cost_per_1m_out_cached": 0,
755 "context_window": 400000,
756 "default_max_tokens": 40000,
757 "can_reason": true,
758 "reasoning_levels": [
759 "low",
760 "medium",
761 "high"
762 ],
763 "default_reasoning_effort": "medium",
764 "supports_attachments": true
765 },
766 {
767 "id": "gpt-oss-120b",
768 "name": "GPT Oss 120b",
769 "cost_per_1m_in": 0.035496,
770 "cost_per_1m_out": 0.17748,
771 "cost_per_1m_in_cached": 0,
772 "cost_per_1m_out_cached": 0,
773 "context_window": 131000,
774 "default_max_tokens": 13100,
775 "can_reason": true,
776 "reasoning_levels": [
777 "low",
778 "medium",
779 "high"
780 ],
781 "default_reasoning_effort": "medium",
782 "supports_attachments": false
783 },
784 {
785 "id": "qwen3-30b-a3b-instruct-2507",
786 "name": "Qwen3 30B A3B Instruct 2507",
787 "cost_per_1m_in": 0.089,
788 "cost_per_1m_out": 0.268,
789 "cost_per_1m_in_cached": 0,
790 "cost_per_1m_out_cached": 0,
791 "context_window": 262000,
792 "default_max_tokens": 26200,
793 "can_reason": true,
794 "reasoning_levels": [
795 "low",
796 "medium",
797 "high"
798 ],
799 "default_reasoning_effort": "medium",
800 "supports_attachments": false
801 },
802 {
803 "id": "gpt-oss-20b",
804 "name": "GPT Oss 20b",
805 "cost_per_1m_in": 0.026622,
806 "cost_per_1m_out": 0.124236,
807 "cost_per_1m_in_cached": 0,
808 "cost_per_1m_out_cached": 0,
809 "context_window": 131000,
810 "default_max_tokens": 13100,
811 "can_reason": true,
812 "reasoning_levels": [
813 "low",
814 "medium",
815 "high"
816 ],
817 "default_reasoning_effort": "medium",
818 "supports_attachments": false
819 },
820 {
821 "id": "mistral-7b-instruct-v0.3",
822 "name": "Mistral 7B Instruct v0.3",
823 "cost_per_1m_in": 0.1,
824 "cost_per_1m_out": 0.1,
825 "cost_per_1m_in_cached": 0,
826 "cost_per_1m_out_cached": 0,
827 "context_window": 127000,
828 "default_max_tokens": 12700,
829 "can_reason": false,
830 "supports_attachments": false
831 },
832 {
833 "id": "mistral-large-2402",
834 "name": "Mistral Large 2402",
835 "cost_per_1m_in": 3.846,
836 "cost_per_1m_out": 11.627,
837 "cost_per_1m_in_cached": 0,
838 "cost_per_1m_out_cached": 0,
839 "context_window": 32000,
840 "default_max_tokens": 3200,
841 "can_reason": true,
842 "reasoning_levels": [
843 "low",
844 "medium",
845 "high"
846 ],
847 "default_reasoning_effort": "medium",
848 "supports_attachments": false
849 },
850 {
851 "id": "pixtral-large-2502",
852 "name": "Pixtral Large 25.02",
853 "cost_per_1m_in": 1.789,
854 "cost_per_1m_out": 5.366,
855 "cost_per_1m_in_cached": 0,
856 "cost_per_1m_out_cached": 0,
857 "context_window": 128000,
858 "default_max_tokens": 12800,
859 "can_reason": true,
860 "reasoning_levels": [
861 "low",
862 "medium",
863 "high"
864 ],
865 "default_reasoning_effort": "medium",
866 "supports_attachments": true
867 },
868 {
869 "id": "mistral-small-3.2-24b-instruct-2506",
870 "name": "Mistral Small 3.2 24B Instruct 2506",
871 "cost_per_1m_in": 0.09,
872 "cost_per_1m_out": 0.28,
873 "cost_per_1m_in_cached": 0,
874 "cost_per_1m_out_cached": 0,
875 "context_window": 128000,
876 "default_max_tokens": 12800,
877 "can_reason": false,
878 "supports_attachments": true
879 },
880 {
881 "id": "qwen3-32b",
882 "name": "Qwen3 32B",
883 "cost_per_1m_in": 0.089,
884 "cost_per_1m_out": 0.268,
885 "cost_per_1m_in_cached": 0,
886 "cost_per_1m_out_cached": 0,
887 "context_window": 40000,
888 "default_max_tokens": 4000,
889 "can_reason": true,
890 "reasoning_levels": [
891 "low",
892 "medium",
893 "high"
894 ],
895 "default_reasoning_effort": "medium",
896 "supports_attachments": false
897 },
898 {
899 "id": "qwen3-235b-a22b-instruct-2507",
900 "name": "Qwen3 235B A22B Instruct 2507",
901 "cost_per_1m_in": 0.062118,
902 "cost_per_1m_out": 0.408204,
903 "cost_per_1m_in_cached": 0,
904 "cost_per_1m_out_cached": 0,
905 "context_window": 131000,
906 "default_max_tokens": 13100,
907 "can_reason": true,
908 "reasoning_levels": [
909 "low",
910 "medium",
911 "high"
912 ],
913 "default_reasoning_effort": "medium",
914 "supports_attachments": false
915 },
916 {
917 "id": "qwen3-coder-30b-a3b-instruct",
918 "name": "Qwen3 Coder 30B A3B Instruct",
919 "cost_per_1m_in": 0.053244,
920 "cost_per_1m_out": 0.22185,
921 "cost_per_1m_in_cached": 0,
922 "cost_per_1m_out_cached": 0,
923 "context_window": 262000,
924 "default_max_tokens": 26200,
925 "can_reason": true,
926 "reasoning_levels": [
927 "low",
928 "medium",
929 "high"
930 ],
931 "default_reasoning_effort": "medium",
932 "supports_attachments": false
933 },
934 {
935 "id": "gpt-4.1",
936 "name": "GPT 4.1",
937 "cost_per_1m_in": 1.968,
938 "cost_per_1m_out": 7.872,
939 "cost_per_1m_in_cached": 0,
940 "cost_per_1m_out_cached": 0,
941 "context_window": 1047576,
942 "default_max_tokens": 104757,
943 "can_reason": true,
944 "reasoning_levels": [
945 "low",
946 "medium",
947 "high"
948 ],
949 "default_reasoning_effort": "medium",
950 "supports_attachments": true
951 },
952 {
953 "id": "gpt-4.1-mini",
954 "name": "GPT 4.1 mini",
955 "cost_per_1m_in": 0.39,
956 "cost_per_1m_out": 1.53,
957 "cost_per_1m_in_cached": 0,
958 "cost_per_1m_out_cached": 0,
959 "context_window": 1047576,
960 "default_max_tokens": 104757,
961 "can_reason": true,
962 "reasoning_levels": [
963 "low",
964 "medium",
965 "high"
966 ],
967 "default_reasoning_effort": "medium",
968 "supports_attachments": true
969 },
970 {
971 "id": "gpt-4.1-nano",
972 "name": "GPT 4.1 nano",
973 "cost_per_1m_in": 0.1,
974 "cost_per_1m_out": 0.39,
975 "cost_per_1m_in_cached": 0,
976 "cost_per_1m_out_cached": 0,
977 "context_window": 1047576,
978 "default_max_tokens": 104757,
979 "can_reason": true,
980 "reasoning_levels": [
981 "low",
982 "medium",
983 "high"
984 ],
985 "default_reasoning_effort": "medium",
986 "supports_attachments": true
987 },
988 {
989 "id": "nova-micro-v1",
990 "name": "Nova Micro 1.0",
991 "cost_per_1m_in": 0.036,
992 "cost_per_1m_out": 0.143,
993 "cost_per_1m_in_cached": 0,
994 "cost_per_1m_out_cached": 0,
995 "context_window": 128000,
996 "default_max_tokens": 12800,
997 "can_reason": true,
998 "reasoning_levels": [
999 "low",
1000 "medium",
1001 "high"
1002 ],
1003 "default_reasoning_effort": "medium",
1004 "supports_attachments": true
1005 },
1006 {
1007 "id": "nova-lite-v1",
1008 "name": "Nova Lite 1.0",
1009 "cost_per_1m_in": 0.062,
1010 "cost_per_1m_out": 0.247,
1011 "cost_per_1m_in_cached": 0,
1012 "cost_per_1m_out_cached": 0,
1013 "context_window": 300000,
1014 "default_max_tokens": 30000,
1015 "can_reason": true,
1016 "reasoning_levels": [
1017 "low",
1018 "medium",
1019 "high"
1020 ],
1021 "default_reasoning_effort": "medium",
1022 "supports_attachments": true
1023 },
1024 {
1025 "id": "nova-pro-v1",
1026 "name": "Nova Pro 1.0",
1027 "cost_per_1m_in": 0.824,
1028 "cost_per_1m_out": 3.295,
1029 "cost_per_1m_in_cached": 0,
1030 "cost_per_1m_out_cached": 0,
1031 "context_window": 300000,
1032 "default_max_tokens": 30000,
1033 "can_reason": true,
1034 "reasoning_levels": [
1035 "low",
1036 "medium",
1037 "high"
1038 ],
1039 "default_reasoning_effort": "medium",
1040 "supports_attachments": true
1041 },
1042 {
1043 "id": "claude-sonnet-4",
1044 "name": "Claude Sonnet 4",
1045 "cost_per_1m_in": 2.601,
1046 "cost_per_1m_out": 13.01,
1047 "cost_per_1m_in_cached": 0,
1048 "cost_per_1m_out_cached": 0,
1049 "context_window": 200000,
1050 "default_max_tokens": 20000,
1051 "can_reason": true,
1052 "reasoning_levels": [
1053 "low",
1054 "medium",
1055 "high"
1056 ],
1057 "default_reasoning_effort": "medium",
1058 "supports_attachments": true
1059 },
1060 {
1061 "id": "llama-3.1-nemotron-ultra-253b-v1",
1062 "name": "Llama 3.1 Nemotron Ultra 253B v1",
1063 "cost_per_1m_in": 0.537,
1064 "cost_per_1m_out": 1.61,
1065 "cost_per_1m_in_cached": 0,
1066 "cost_per_1m_out_cached": 0,
1067 "context_window": 128000,
1068 "default_max_tokens": 12800,
1069 "can_reason": true,
1070 "reasoning_levels": [
1071 "low",
1072 "medium",
1073 "high"
1074 ],
1075 "default_reasoning_effort": "medium",
1076 "supports_attachments": false
1077 },
1078 {
1079 "id": "llama-4-maverick",
1080 "name": "Llama 4 Maverick",
1081 "cost_per_1m_in": 0.124236,
1082 "cost_per_1m_out": 0.602832,
1083 "cost_per_1m_in_cached": 0,
1084 "cost_per_1m_out_cached": 0,
1085 "context_window": 1050000,
1086 "default_max_tokens": 105000,
1087 "can_reason": false,
1088 "supports_attachments": false
1089 },
1090 {
1091 "id": "deepseek-v3-0324",
1092 "name": "DeepSeek V3 0324",
1093 "cost_per_1m_in": 0.26622,
1094 "cost_per_1m_out": 0.8874,
1095 "cost_per_1m_in_cached": 0,
1096 "cost_per_1m_out_cached": 0,
1097 "context_window": 163840,
1098 "default_max_tokens": 16384,
1099 "can_reason": true,
1100 "reasoning_levels": [
1101 "low",
1102 "medium",
1103 "high"
1104 ],
1105 "default_reasoning_effort": "medium",
1106 "supports_attachments": false
1107 },
1108 {
1109 "id": "mistral-small-2503",
1110 "name": "Mistral Small 2503",
1111 "cost_per_1m_in": 0.1,
1112 "cost_per_1m_out": 0.3,
1113 "cost_per_1m_in_cached": 0,
1114 "cost_per_1m_out_cached": 0,
1115 "context_window": 128000,
1116 "default_max_tokens": 12800,
1117 "can_reason": false,
1118 "supports_attachments": true
1119 },
1120 {
1121 "id": "mistral-small-2506",
1122 "name": "Mistral Small 2506",
1123 "cost_per_1m_in": 0.1,
1124 "cost_per_1m_out": 0.3,
1125 "cost_per_1m_in_cached": 0,
1126 "cost_per_1m_out_cached": 0,
1127 "context_window": 131072,
1128 "default_max_tokens": 13107,
1129 "can_reason": false,
1130 "supports_attachments": true
1131 },
1132 {
1133 "id": "gemini-2.0-flash-001",
1134 "name": "Gemini 2.0 Flash",
1135 "cost_per_1m_in": 0.13416,
1136 "cost_per_1m_out": 0.53664,
1137 "cost_per_1m_in_cached": 0,
1138 "cost_per_1m_out_cached": 0,
1139 "context_window": 1048576,
1140 "default_max_tokens": 104857,
1141 "can_reason": false,
1142 "supports_attachments": true
1143 },
1144 {
1145 "id": "gemini-2.0-flash-lite-001",
1146 "name": "Gemini 2.0 Flash Lite",
1147 "cost_per_1m_in": 0.06708,
1148 "cost_per_1m_out": 0.26832,
1149 "cost_per_1m_in_cached": 0,
1150 "cost_per_1m_out_cached": 0,
1151 "context_window": 1048576,
1152 "default_max_tokens": 104857,
1153 "can_reason": false,
1154 "supports_attachments": true
1155 },
1156 {
1157 "id": "gemini-2.5-flash",
1158 "name": "Gemini 2.5 Flash",
1159 "cost_per_1m_in": 0.26832,
1160 "cost_per_1m_out": 2.236,
1161 "cost_per_1m_in_cached": 0,
1162 "cost_per_1m_out_cached": 0,
1163 "context_window": 1048576,
1164 "default_max_tokens": 104857,
1165 "can_reason": true,
1166 "reasoning_levels": [
1167 "low",
1168 "medium",
1169 "high"
1170 ],
1171 "default_reasoning_effort": "medium",
1172 "supports_attachments": true
1173 },
1174 {
1175 "id": "gemini-2.5-pro",
1176 "name": "Gemini 2.5 Pro",
1177 "cost_per_1m_in": 1.3416,
1178 "cost_per_1m_out": 8.944,
1179 "cost_per_1m_in_cached": 0,
1180 "cost_per_1m_out_cached": 0,
1181 "context_window": 1048576,
1182 "default_max_tokens": 104857,
1183 "can_reason": true,
1184 "reasoning_levels": [
1185 "low",
1186 "medium",
1187 "high"
1188 ],
1189 "default_reasoning_effort": "medium",
1190 "supports_attachments": true
1191 },
1192 {
1193 "id": "gemma-3-27b-it",
1194 "name": "Gemma 3 27b it",
1195 "cost_per_1m_in": 0.089,
1196 "cost_per_1m_out": 0.268,
1197 "cost_per_1m_in_cached": 0,
1198 "cost_per_1m_out_cached": 0,
1199 "context_window": 131000,
1200 "default_max_tokens": 13100,
1201 "can_reason": true,
1202 "reasoning_levels": [
1203 "low",
1204 "medium",
1205 "high"
1206 ],
1207 "default_reasoning_effort": "medium",
1208 "supports_attachments": true
1209 },
1210 {
1211 "id": "deepseek-r1-0528",
1212 "name": "DeepSeek R1 0528",
1213 "cost_per_1m_in": 0.585084,
1214 "cost_per_1m_out": 2.30724,
1215 "cost_per_1m_in_cached": 0,
1216 "cost_per_1m_out_cached": 0,
1217 "context_window": 164000,
1218 "default_max_tokens": 16400,
1219 "can_reason": true,
1220 "reasoning_levels": [
1221 "low",
1222 "medium",
1223 "high"
1224 ],
1225 "default_reasoning_effort": "medium",
1226 "supports_attachments": false
1227 },
1228 {
1229 "id": "codestral-2508",
1230 "name": "Codestral 25.08",
1231 "cost_per_1m_in": 0.3,
1232 "cost_per_1m_out": 0.9,
1233 "cost_per_1m_in_cached": 0,
1234 "cost_per_1m_out_cached": 0,
1235 "context_window": 256000,
1236 "default_max_tokens": 25600,
1237 "can_reason": false,
1238 "supports_attachments": false
1239 },
1240 {
1241 "id": "llama-3.3-70b-instruct",
1242 "name": "Llama 3.3 70B Instruct",
1243 "cost_per_1m_in": 0.08874,
1244 "cost_per_1m_out": 0.274994,
1245 "cost_per_1m_in_cached": 0,
1246 "cost_per_1m_out_cached": 0,
1247 "context_window": 131000,
1248 "default_max_tokens": 13100,
1249 "can_reason": true,
1250 "reasoning_levels": [
1251 "low",
1252 "medium",
1253 "high"
1254 ],
1255 "default_reasoning_effort": "medium",
1256 "supports_attachments": false
1257 },
1258 {
1259 "id": "gpt-4o",
1260 "name": "GPT 4o",
1261 "cost_per_1m_in": 2.38664,
1262 "cost_per_1m_out": 9.5466,
1263 "cost_per_1m_in_cached": 0,
1264 "cost_per_1m_out_cached": 0,
1265 "context_window": 128000,
1266 "default_max_tokens": 12800,
1267 "can_reason": true,
1268 "reasoning_levels": [
1269 "low",
1270 "medium",
1271 "high"
1272 ],
1273 "default_reasoning_effort": "medium",
1274 "supports_attachments": true
1275 },
1276 {
1277 "id": "gpt-5-mini",
1278 "name": "GPT 5 mini",
1279 "cost_per_1m_in": 0.25,
1280 "cost_per_1m_out": 1.968,
1281 "cost_per_1m_in_cached": 0,
1282 "cost_per_1m_out_cached": 0,
1283 "context_window": 400000,
1284 "default_max_tokens": 40000,
1285 "can_reason": true,
1286 "reasoning_levels": [
1287 "low",
1288 "medium",
1289 "high"
1290 ],
1291 "default_reasoning_effort": "medium",
1292 "supports_attachments": true
1293 },
1294 {
1295 "id": "gpt-5-nano",
1296 "name": "GPT 5 nano",
1297 "cost_per_1m_in": 0.054,
1298 "cost_per_1m_out": 0.394,
1299 "cost_per_1m_in_cached": 0,
1300 "cost_per_1m_out_cached": 0,
1301 "context_window": 400000,
1302 "default_max_tokens": 40000,
1303 "can_reason": true,
1304 "reasoning_levels": [
1305 "low",
1306 "medium",
1307 "high"
1308 ],
1309 "default_reasoning_effort": "medium",
1310 "supports_attachments": true
1311 },
1312 {
1313 "id": "mistral-large-2411",
1314 "name": "Mistral Large 2411",
1315 "cost_per_1m_in": 1.8,
1316 "cost_per_1m_out": 5.4,
1317 "cost_per_1m_in_cached": 0,
1318 "cost_per_1m_out_cached": 0,
1319 "context_window": 131072,
1320 "default_max_tokens": 13107,
1321 "can_reason": true,
1322 "reasoning_levels": [
1323 "low",
1324 "medium",
1325 "high"
1326 ],
1327 "default_reasoning_effort": "medium",
1328 "supports_attachments": false
1329 },
1330 {
1331 "id": "hermes-4-405b",
1332 "name": "Hermes 4 405B",
1333 "cost_per_1m_in": 0.894,
1334 "cost_per_1m_out": 2.683,
1335 "cost_per_1m_in_cached": 0,
1336 "cost_per_1m_out_cached": 0,
1337 "context_window": 128000,
1338 "default_max_tokens": 12800,
1339 "can_reason": false,
1340 "supports_attachments": false
1341 },
1342 {
1343 "id": "mistral-nemo-instruct-2407",
1344 "name": "Mistral Nemo 2407",
1345 "cost_per_1m_in": 0.13,
1346 "cost_per_1m_out": 0.13,
1347 "cost_per_1m_in_cached": 0,
1348 "cost_per_1m_out_cached": 0,
1349 "context_window": 131072,
1350 "default_max_tokens": 13107,
1351 "can_reason": false,
1352 "supports_attachments": false
1353 },
1354 {
1355 "id": "devstral-medium-2507",
1356 "name": "Devstral Medium 2507",
1357 "cost_per_1m_in": 0.4,
1358 "cost_per_1m_out": 2,
1359 "cost_per_1m_in_cached": 0,
1360 "cost_per_1m_out_cached": 0,
1361 "context_window": 131072,
1362 "default_max_tokens": 13107,
1363 "can_reason": false,
1364 "supports_attachments": false
1365 },
1366 {
1367 "id": "devstral-small-2507",
1368 "name": "Devstral Small 2507",
1369 "cost_per_1m_in": 0.1,
1370 "cost_per_1m_out": 0.3,
1371 "cost_per_1m_in_cached": 0,
1372 "cost_per_1m_out_cached": 0,
1373 "context_window": 131072,
1374 "default_max_tokens": 13107,
1375 "can_reason": false,
1376 "supports_attachments": false
1377 },
1378 {
1379 "id": "mistral-medium-2508",
1380 "name": "Mistral Medium 2508",
1381 "cost_per_1m_in": 0.4,
1382 "cost_per_1m_out": 2,
1383 "cost_per_1m_in_cached": 0,
1384 "cost_per_1m_out_cached": 0,
1385 "context_window": 128000,
1386 "default_max_tokens": 12800,
1387 "can_reason": true,
1388 "reasoning_levels": [
1389 "low",
1390 "medium",
1391 "high"
1392 ],
1393 "default_reasoning_effort": "medium",
1394 "supports_attachments": true
1395 },
1396 {
1397 "id": "llama-3.1-405b-instruct",
1398 "name": "Llama 3.1 405B Instruct",
1399 "cost_per_1m_in": 1.75,
1400 "cost_per_1m_out": 1.75,
1401 "cost_per_1m_in_cached": 0,
1402 "cost_per_1m_out_cached": 0,
1403 "context_window": 128000,
1404 "default_max_tokens": 12800,
1405 "can_reason": true,
1406 "reasoning_levels": [
1407 "low",
1408 "medium",
1409 "high"
1410 ],
1411 "default_reasoning_effort": "medium",
1412 "supports_attachments": false
1413 },
1414 {
1415 "id": "gpt-4o-mini",
1416 "name": "GPT 4o mini",
1417 "cost_per_1m_in": 0.1432,
1418 "cost_per_1m_out": 0.5728,
1419 "cost_per_1m_in_cached": 0,
1420 "cost_per_1m_out_cached": 0,
1421 "context_window": 128000,
1422 "default_max_tokens": 12800,
1423 "can_reason": true,
1424 "reasoning_levels": [
1425 "low",
1426 "medium",
1427 "high"
1428 ],
1429 "default_reasoning_effort": "medium",
1430 "supports_attachments": true
1431 },
1432 {
1433 "id": "llama-3.1-8b-instruct",
1434 "name": "Llama 3.1 8B Instruct",
1435 "cost_per_1m_in": 0.018,
1436 "cost_per_1m_out": 0.054,
1437 "cost_per_1m_in_cached": 0,
1438 "cost_per_1m_out_cached": 0,
1439 "context_window": 128000,
1440 "default_max_tokens": 12800,
1441 "can_reason": true,
1442 "reasoning_levels": [
1443 "low",
1444 "medium",
1445 "high"
1446 ],
1447 "default_reasoning_effort": "medium",
1448 "supports_attachments": false
1449 }
1450 ]
1451}