1{
2 "name": "Cortecs",
3 "id": "cortecs",
4 "api_key": "$CORTECS_API_KEY",
5 "api_endpoint": "https://api.cortecs.ai/v1",
6 "type": "openai",
7 "default_large_model_id": "qwen3-coder-30b-a3b-instruct",
8 "default_small_model_id": "glm-4.7-flash",
9 "models": [
10 {
11 "id": "gpt-5.4",
12 "name": "GPT 5.4",
13 "cost_per_1m_in": 2.601,
14 "cost_per_1m_out": 13.872,
15 "cost_per_1m_in_cached": 0,
16 "cost_per_1m_out_cached": 0,
17 "context_window": 1050000,
18 "default_max_tokens": 105000,
19 "can_reason": true,
20 "reasoning_levels": [
21 "low",
22 "medium",
23 "high"
24 ],
25 "default_reasoning_effort": "medium",
26 "supports_attachments": true
27 },
28 {
29 "id": "kimi-k2.6",
30 "name": "Kimi K2.6",
31 "cost_per_1m_in": 0.6936,
32 "cost_per_1m_out": 3.0345,
33 "cost_per_1m_in_cached": 0,
34 "cost_per_1m_out_cached": 0,
35 "context_window": 256000,
36 "default_max_tokens": 25600,
37 "can_reason": true,
38 "reasoning_levels": [
39 "low",
40 "medium",
41 "high"
42 ],
43 "default_reasoning_effort": "medium",
44 "supports_attachments": true
45 },
46 {
47 "id": "claude-opus4-7",
48 "name": "Claude Opus 4.7",
49 "cost_per_1m_in": 4.7685,
50 "cost_per_1m_out": 23.8425,
51 "cost_per_1m_in_cached": 0,
52 "cost_per_1m_out_cached": 0,
53 "context_window": 1000000,
54 "default_max_tokens": 100000,
55 "can_reason": true,
56 "reasoning_levels": [
57 "low",
58 "medium",
59 "high"
60 ],
61 "default_reasoning_effort": "medium",
62 "supports_attachments": true
63 },
64 {
65 "id": "minimax-m2.7",
66 "name": "MiniMax M2.7",
67 "cost_per_1m_in": 0.26622,
68 "cost_per_1m_out": 1.06488,
69 "cost_per_1m_in_cached": 0,
70 "cost_per_1m_out_cached": 0,
71 "context_window": 196608,
72 "default_max_tokens": 19660,
73 "can_reason": true,
74 "reasoning_levels": [
75 "low",
76 "medium",
77 "high"
78 ],
79 "default_reasoning_effort": "medium",
80 "supports_attachments": false
81 },
82 {
83 "id": "glm-5.1",
84 "name": "GLM 5.1",
85 "cost_per_1m_in": 1.24236,
86 "cost_per_1m_out": 3.90336,
87 "cost_per_1m_in_cached": 0,
88 "cost_per_1m_out_cached": 0,
89 "context_window": 202752,
90 "default_max_tokens": 20275,
91 "can_reason": true,
92 "reasoning_levels": [
93 "low",
94 "medium",
95 "high"
96 ],
97 "default_reasoning_effort": "medium",
98 "supports_attachments": false
99 },
100 {
101 "id": "qwen3.5-122b-a10b",
102 "name": "Qwen3.5 122B A10B",
103 "cost_per_1m_in": 0.4437,
104 "cost_per_1m_out": 3.1059,
105 "cost_per_1m_in_cached": 0,
106 "cost_per_1m_out_cached": 0,
107 "context_window": 262144,
108 "default_max_tokens": 26214,
109 "can_reason": true,
110 "reasoning_levels": [
111 "low",
112 "medium",
113 "high"
114 ],
115 "default_reasoning_effort": "medium",
116 "supports_attachments": false
117 },
118 {
119 "id": "qwen3.5-9b",
120 "name": "Qwen3.5 9B",
121 "cost_per_1m_in": 0.1,
122 "cost_per_1m_out": 0.15,
123 "cost_per_1m_in_cached": 0,
124 "cost_per_1m_out_cached": 0,
125 "context_window": 262000,
126 "default_max_tokens": 26200,
127 "can_reason": true,
128 "reasoning_levels": [
129 "low",
130 "medium",
131 "high"
132 ],
133 "default_reasoning_effort": "medium",
134 "supports_attachments": false
135 },
136 {
137 "id": "nemotron-3-super-120b-a12b",
138 "name": "Nemotron 3 Super 120B A12B",
139 "cost_per_1m_in": 0.15606,
140 "cost_per_1m_out": 0.67626,
141 "cost_per_1m_in_cached": 0,
142 "cost_per_1m_out_cached": 0,
143 "context_window": 262000,
144 "default_max_tokens": 26214,
145 "can_reason": true,
146 "reasoning_levels": [
147 "low",
148 "medium",
149 "high"
150 ],
151 "default_reasoning_effort": "medium",
152 "supports_attachments": false
153 },
154 {
155 "id": "qwen3-coder-next",
156 "name": "Qwen3 Coder Next",
157 "cost_per_1m_in": 0.15,
158 "cost_per_1m_out": 0.8,
159 "cost_per_1m_in_cached": 0,
160 "cost_per_1m_out_cached": 0,
161 "context_window": 256000,
162 "default_max_tokens": 25600,
163 "can_reason": true,
164 "reasoning_levels": [
165 "low",
166 "medium",
167 "high"
168 ],
169 "default_reasoning_effort": "medium",
170 "supports_attachments": false
171 },
172 {
173 "id": "glm-5",
174 "name": "GLM 5",
175 "cost_per_1m_in": 0.8874,
176 "cost_per_1m_out": 2.83968,
177 "cost_per_1m_in_cached": 0,
178 "cost_per_1m_out_cached": 0,
179 "context_window": 202752,
180 "default_max_tokens": 20275,
181 "can_reason": true,
182 "reasoning_levels": [
183 "low",
184 "medium",
185 "high"
186 ],
187 "default_reasoning_effort": "medium",
188 "supports_attachments": false
189 },
190 {
191 "id": "glm-4.6",
192 "name": "GLM 4.6",
193 "cost_per_1m_in": 0.35496,
194 "cost_per_1m_out": 1.55295,
195 "cost_per_1m_in_cached": 0,
196 "cost_per_1m_out_cached": 0,
197 "context_window": 203000,
198 "default_max_tokens": 20300,
199 "can_reason": true,
200 "reasoning_levels": [
201 "low",
202 "medium",
203 "high"
204 ],
205 "default_reasoning_effort": "medium",
206 "supports_attachments": false
207 },
208 {
209 "id": "deepseek-chat-v3.1",
210 "name": "DeepSeek Chat V3.1",
211 "cost_per_1m_in": 0.17748,
212 "cost_per_1m_out": 0.70992,
213 "cost_per_1m_in_cached": 0,
214 "cost_per_1m_out_cached": 0,
215 "context_window": 164000,
216 "default_max_tokens": 16400,
217 "can_reason": true,
218 "reasoning_levels": [
219 "low",
220 "medium",
221 "high"
222 ],
223 "default_reasoning_effort": "medium",
224 "supports_attachments": false
225 },
226 {
227 "id": "qwen-2.5-72b-instruct",
228 "name": "Qwen2.5 72B Instruct",
229 "cost_per_1m_in": 0.062118,
230 "cost_per_1m_out": 0.230724,
231 "cost_per_1m_in_cached": 0,
232 "cost_per_1m_out_cached": 0,
233 "context_window": 33000,
234 "default_max_tokens": 3300,
235 "can_reason": false,
236 "supports_attachments": false
237 },
238 {
239 "id": "qwen3.5-397b-a17b",
      "name": "Qwen3.5 397B A17B",
241 "cost_per_1m_in": 0.53244,
242 "cost_per_1m_out": 3.19464,
243 "cost_per_1m_in_cached": 0,
244 "cost_per_1m_out_cached": 0,
245 "context_window": 262000,
246 "default_max_tokens": 25000,
247 "can_reason": true,
248 "reasoning_levels": [
249 "low",
250 "medium",
251 "high"
252 ],
253 "default_reasoning_effort": "medium",
254 "supports_attachments": false
255 },
256 {
257 "id": "deepseek-v3.2",
258 "name": "DeepSeek V3.2",
259 "cost_per_1m_in": 0.26622,
260 "cost_per_1m_out": 0.4437,
261 "cost_per_1m_in_cached": 0,
262 "cost_per_1m_out_cached": 0,
263 "context_window": 163840,
264 "default_max_tokens": 16384,
265 "can_reason": true,
266 "reasoning_levels": [
267 "low",
268 "medium",
269 "high"
270 ],
271 "default_reasoning_effort": "medium",
272 "supports_attachments": false
273 },
274 {
275 "id": "mistral-small-2603",
276 "name": "Mistral Small 4 2603",
277 "cost_per_1m_in": 0.1275,
278 "cost_per_1m_out": 0.51,
279 "cost_per_1m_in_cached": 0,
280 "cost_per_1m_out_cached": 0,
281 "context_window": 256000,
282 "default_max_tokens": 25600,
283 "can_reason": true,
284 "reasoning_levels": [
285 "low",
286 "medium",
287 "high"
288 ],
289 "default_reasoning_effort": "medium",
290 "supports_attachments": true
291 },
292 {
293 "id": "minimax-m2.5",
294 "name": "MiniMax M2.5",
295 "cost_per_1m_in": 0.26622,
296 "cost_per_1m_out": 0.97614,
297 "cost_per_1m_in_cached": 0,
298 "cost_per_1m_out_cached": 0,
299 "context_window": 196608,
300 "default_max_tokens": 19660,
301 "can_reason": true,
302 "reasoning_levels": [
303 "low",
304 "medium",
305 "high"
306 ],
307 "default_reasoning_effort": "medium",
308 "supports_attachments": false
309 },
310 {
311 "id": "claude-4-6-sonnet",
312 "name": "Claude Sonnet 4.6",
313 "cost_per_1m_in": 2.8691,
314 "cost_per_1m_out": 14.3095,
315 "cost_per_1m_in_cached": 0,
316 "cost_per_1m_out_cached": 0,
317 "context_window": 1000000,
318 "default_max_tokens": 100000,
319 "can_reason": true,
320 "reasoning_levels": [
321 "low",
322 "medium",
323 "high"
324 ],
325 "default_reasoning_effort": "medium",
326 "supports_attachments": true
327 },
328 {
329 "id": "glm-4.7-flash",
330 "name": "GLM 4.7 Flash",
331 "cost_per_1m_in": 0.0716,
332 "cost_per_1m_out": 0.4293,
333 "cost_per_1m_in_cached": 0,
334 "cost_per_1m_out_cached": 0,
335 "context_window": 203000,
336 "default_max_tokens": 20300,
337 "can_reason": false,
338 "supports_attachments": false
339 },
340 {
341 "id": "kimi-k2.5",
342 "name": "Kimi K2.5",
343 "cost_per_1m_in": 0.4437,
344 "cost_per_1m_out": 2.12976,
345 "cost_per_1m_in_cached": 0,
346 "cost_per_1m_out_cached": 0,
347 "context_window": 256000,
348 "default_max_tokens": 25600,
349 "can_reason": true,
350 "reasoning_levels": [
351 "low",
352 "medium",
353 "high"
354 ],
355 "default_reasoning_effort": "medium",
356 "supports_attachments": true
357 },
358 {
359 "id": "claude-opus4-6",
360 "name": "Claude Opus 4.6",
361 "cost_per_1m_in": 4.7685,
362 "cost_per_1m_out": 23.8425,
363 "cost_per_1m_in_cached": 0,
364 "cost_per_1m_out_cached": 0,
365 "context_window": 1000000,
366 "default_max_tokens": 100000,
367 "can_reason": true,
368 "reasoning_levels": [
369 "low",
370 "medium",
371 "high"
372 ],
373 "default_reasoning_effort": "medium",
374 "supports_attachments": true
375 },
376 {
377 "id": "minimax-m2",
378 "name": "MiniMax M2",
379 "cost_per_1m_in": 0.22185,
380 "cost_per_1m_out": 0.8874,
381 "cost_per_1m_in_cached": 0,
382 "cost_per_1m_out_cached": 0,
383 "context_window": 196608,
384 "default_max_tokens": 19660,
385 "can_reason": true,
386 "reasoning_levels": [
387 "low",
388 "medium",
389 "high"
390 ],
391 "default_reasoning_effort": "medium",
392 "supports_attachments": false
393 },
394 {
395 "id": "glm-4.7",
396 "name": "GLM 4.7",
397 "cost_per_1m_in": 0.53244,
398 "cost_per_1m_out": 1.95228,
399 "cost_per_1m_in_cached": 0,
400 "cost_per_1m_out_cached": 0,
401 "context_window": 202752,
402 "default_max_tokens": 20275,
403 "can_reason": true,
404 "reasoning_levels": [
405 "low",
406 "medium",
407 "high"
408 ],
409 "default_reasoning_effort": "medium",
410 "supports_attachments": false
411 },
412 {
413 "id": "minimax-m2.1",
414 "name": "MiniMax M2.1",
415 "cost_per_1m_in": 0.322,
416 "cost_per_1m_out": 1.2879,
417 "cost_per_1m_in_cached": 0,
418 "cost_per_1m_out_cached": 0,
419 "context_window": 196000,
420 "default_max_tokens": 19600,
421 "can_reason": true,
422 "reasoning_levels": [
423 "low",
424 "medium",
425 "high"
426 ],
427 "default_reasoning_effort": "medium",
428 "supports_attachments": false
429 },
430 {
431 "id": "qwen3-vl-235b-a22b",
432 "name": "Qwen3 VL 235B A22B",
433 "cost_per_1m_in": 0.186354,
434 "cost_per_1m_out": 1.68606,
435 "cost_per_1m_in_cached": 0,
436 "cost_per_1m_out_cached": 0,
437 "context_window": 131000,
438 "default_max_tokens": 13100,
439 "can_reason": true,
440 "reasoning_levels": [
441 "low",
442 "medium",
443 "high"
444 ],
445 "default_reasoning_effort": "medium",
446 "supports_attachments": true
447 },
448 {
449 "id": "mistral-small-creative",
450 "name": "Mistral Small Creative",
451 "cost_per_1m_in": 0.1,
452 "cost_per_1m_out": 0.3,
453 "cost_per_1m_in_cached": 0,
454 "cost_per_1m_out_cached": 0,
455 "context_window": 32000,
456 "default_max_tokens": 3200,
457 "can_reason": false,
458 "supports_attachments": false
459 },
460 {
461 "id": "nvidia-nemotron-3-nano-30b-a3b",
462 "name": "Nemotron 3 Nano 30B A3B",
463 "cost_per_1m_in": 0.0537,
464 "cost_per_1m_out": 0.215,
465 "cost_per_1m_in_cached": 0,
466 "cost_per_1m_out_cached": 0,
467 "context_window": 128000,
468 "default_max_tokens": 12800,
469 "can_reason": true,
470 "reasoning_levels": [
471 "low",
472 "medium",
473 "high"
474 ],
475 "default_reasoning_effort": "medium",
476 "supports_attachments": false
477 },
478 {
479 "id": "claude-opus4-5",
480 "name": "Claude Opus 4.5",
481 "cost_per_1m_in": 4.7695,
482 "cost_per_1m_out": 23.8485,
483 "cost_per_1m_in_cached": 0,
484 "cost_per_1m_out_cached": 0,
485 "context_window": 200000,
486 "default_max_tokens": 20000,
487 "can_reason": true,
488 "reasoning_levels": [
489 "low",
490 "medium",
491 "high"
492 ],
493 "default_reasoning_effort": "medium",
494 "supports_attachments": true
495 },
496 {
497 "id": "qwen3-next-80b-a3b-thinking",
498 "name": "Qwen3 Next 80B A3B Thinking",
499 "cost_per_1m_in": 0.13311,
500 "cost_per_1m_out": 1.06488,
501 "cost_per_1m_in_cached": 0,
502 "cost_per_1m_out_cached": 0,
503 "context_window": 262000,
504 "default_max_tokens": 12800,
505 "can_reason": true,
506 "reasoning_levels": [
507 "low",
508 "medium",
509 "high"
510 ],
511 "default_reasoning_effort": "medium",
512 "supports_attachments": false
513 },
514 {
515 "id": "holo2-30b-a3b",
516 "name": "Holo2 30B A3B",
517 "cost_per_1m_in": 0.3,
518 "cost_per_1m_out": 0.7,
519 "cost_per_1m_in_cached": 0,
520 "cost_per_1m_out_cached": 0,
521 "context_window": 22000,
522 "default_max_tokens": 2200,
523 "can_reason": true,
524 "reasoning_levels": [
525 "low",
526 "medium",
527 "high"
528 ],
529 "default_reasoning_effort": "medium",
530 "supports_attachments": true
531 },
532 {
533 "id": "devstral-2512",
534 "name": "Devstral 2 2512",
535 "cost_per_1m_in": 0.4,
536 "cost_per_1m_out": 2,
537 "cost_per_1m_in_cached": 0,
538 "cost_per_1m_out_cached": 0,
539 "context_window": 262000,
540 "default_max_tokens": 26200,
541 "can_reason": false,
542 "supports_attachments": false
543 },
544 {
545 "id": "nova-2-lite",
546 "name": "Nova 2 Lite",
547 "cost_per_1m_in": 0.335,
548 "cost_per_1m_out": 2.822,
549 "cost_per_1m_in_cached": 0,
550 "cost_per_1m_out_cached": 0,
551 "context_window": 1000000,
552 "default_max_tokens": 100000,
553 "can_reason": true,
554 "reasoning_levels": [
555 "low",
556 "medium",
557 "high"
558 ],
559 "default_reasoning_effort": "medium",
560 "supports_attachments": true
561 },
562 {
563 "id": "gpt-oss-safeguard-120b",
564 "name": "GPT OSS Safeguard 120B",
565 "cost_per_1m_in": 0.161,
566 "cost_per_1m_out": 0.626,
567 "cost_per_1m_in_cached": 0,
568 "cost_per_1m_out_cached": 0,
569 "context_window": 128000,
570 "default_max_tokens": 12800,
571 "can_reason": true,
572 "reasoning_levels": [
573 "low",
574 "medium",
575 "high"
576 ],
577 "default_reasoning_effort": "medium",
578 "supports_attachments": false
579 },
580 {
581 "id": "mistral-large-2512",
582 "name": "Mistral Large 3 2512",
583 "cost_per_1m_in": 0.5,
584 "cost_per_1m_out": 1.5,
585 "cost_per_1m_in_cached": 0,
586 "cost_per_1m_out_cached": 0,
587 "context_window": 256000,
588 "default_max_tokens": 25600,
589 "can_reason": false,
590 "supports_attachments": true
591 },
592 {
593 "id": "ministral-8b-2512",
594 "name": "Ministral 3 8b 2512",
595 "cost_per_1m_in": 0.15,
596 "cost_per_1m_out": 0.15,
597 "cost_per_1m_in_cached": 0,
598 "cost_per_1m_out_cached": 0,
599 "context_window": 256000,
600 "default_max_tokens": 25600,
601 "can_reason": false,
602 "supports_attachments": true
603 },
604 {
605 "id": "ministral-3b-2512",
606 "name": "Ministral 3 3b 2512",
607 "cost_per_1m_in": 0.1,
608 "cost_per_1m_out": 0.1,
609 "cost_per_1m_in_cached": 0,
610 "cost_per_1m_out_cached": 0,
611 "context_window": 256000,
612 "default_max_tokens": 25600,
613 "can_reason": false,
614 "supports_attachments": true
615 },
616 {
617 "id": "ministral-14b-2512",
618 "name": "Ministral 3 14b 2512",
619 "cost_per_1m_in": 0.2,
620 "cost_per_1m_out": 0.2,
621 "cost_per_1m_in_cached": 0,
622 "cost_per_1m_out_cached": 0,
623 "context_window": 256000,
624 "default_max_tokens": 25600,
625 "can_reason": false,
626 "supports_attachments": true
627 },
628 {
629 "id": "intellect-3",
630 "name": "INTELLECT-3",
631 "cost_per_1m_in": 0.179,
632 "cost_per_1m_out": 0.984,
633 "cost_per_1m_in_cached": 0,
634 "cost_per_1m_out_cached": 0,
635 "context_window": 128000,
636 "default_max_tokens": 12800,
637 "can_reason": true,
638 "reasoning_levels": [
639 "low",
640 "medium",
641 "high"
642 ],
643 "default_reasoning_effort": "medium",
644 "supports_attachments": false
645 },
646 {
647 "id": "gpt-5.1",
648 "name": "GPT 5.1",
649 "cost_per_1m_in": 1.234,
650 "cost_per_1m_out": 9.838,
651 "cost_per_1m_in_cached": 0,
652 "cost_per_1m_out_cached": 0,
653 "context_window": 400000,
654 "default_max_tokens": 40000,
655 "can_reason": true,
656 "reasoning_levels": [
657 "low",
658 "medium",
659 "high"
660 ],
661 "default_reasoning_effort": "medium",
662 "supports_attachments": true
663 },
664 {
665 "id": "nemotron-nano-v2-12b",
666 "name": "Nemotron Nano V2 12b",
667 "cost_per_1m_in": 0.215,
668 "cost_per_1m_out": 0.635,
669 "cost_per_1m_in_cached": 0,
670 "cost_per_1m_out_cached": 0,
671 "context_window": 128000,
672 "default_max_tokens": 12800,
673 "can_reason": true,
674 "reasoning_levels": [
675 "low",
676 "medium",
677 "high"
678 ],
679 "default_reasoning_effort": "medium",
680 "supports_attachments": true
681 },
682 {
683 "id": "claude-haiku-4-5",
684 "name": "Claude Haiku 4.5",
685 "cost_per_1m_in": 0.894,
686 "cost_per_1m_out": 4.472,
687 "cost_per_1m_in_cached": 0,
688 "cost_per_1m_out_cached": 0,
689 "context_window": 200000,
690 "default_max_tokens": 20000,
691 "can_reason": true,
692 "reasoning_levels": [
693 "low",
694 "medium",
695 "high"
696 ],
697 "default_reasoning_effort": "medium",
698 "supports_attachments": true
699 },
700 {
701 "id": "claude-4-5-sonnet",
702 "name": "Claude 4.5 Sonnet",
703 "cost_per_1m_in": 2.683,
704 "cost_per_1m_out": 13.416,
705 "cost_per_1m_in_cached": 0,
706 "cost_per_1m_out_cached": 0,
707 "context_window": 200000,
708 "default_max_tokens": 20000,
709 "can_reason": true,
710 "reasoning_levels": [
711 "low",
712 "medium",
713 "high"
714 ],
715 "default_reasoning_effort": "medium",
716 "supports_attachments": true
717 },
718 {
719 "id": "magistral-small-2509",
720 "name": "Magistral Small 2509",
721 "cost_per_1m_in": 0.5,
722 "cost_per_1m_out": 1.5,
723 "cost_per_1m_in_cached": 0,
724 "cost_per_1m_out_cached": 0,
725 "context_window": 128000,
726 "default_max_tokens": 12800,
727 "can_reason": true,
728 "reasoning_levels": [
729 "low",
730 "medium",
731 "high"
732 ],
733 "default_reasoning_effort": "medium",
734 "supports_attachments": true
735 },
736 {
737 "id": "magistral-medium-2509",
738 "name": "Magistral Medium 2509",
739 "cost_per_1m_in": 2,
740 "cost_per_1m_out": 5,
741 "cost_per_1m_in_cached": 0,
742 "cost_per_1m_out_cached": 0,
743 "context_window": 128000,
744 "default_max_tokens": 12800,
745 "can_reason": true,
746 "reasoning_levels": [
747 "low",
748 "medium",
749 "high"
750 ],
751 "default_reasoning_effort": "medium",
752 "supports_attachments": true
753 },
754 {
755 "id": "hermes-4-70b",
756 "name": "Hermes 4 70B",
757 "cost_per_1m_in": 0.116,
758 "cost_per_1m_out": 0.358,
759 "cost_per_1m_in_cached": 0,
760 "cost_per_1m_out_cached": 0,
761 "context_window": 128000,
762 "default_max_tokens": 12800,
763 "can_reason": false,
764 "supports_attachments": false
765 },
766 {
767 "id": "gpt-5",
768 "name": "GPT 5",
769 "cost_per_1m_in": 1.234,
770 "cost_per_1m_out": 9.838,
771 "cost_per_1m_in_cached": 0,
772 "cost_per_1m_out_cached": 0,
773 "context_window": 400000,
774 "default_max_tokens": 40000,
775 "can_reason": true,
776 "reasoning_levels": [
777 "low",
778 "medium",
779 "high"
780 ],
781 "default_reasoning_effort": "medium",
782 "supports_attachments": true
783 },
784 {
785 "id": "gpt-oss-120b",
786 "name": "GPT Oss 120b",
787 "cost_per_1m_in": 0.035496,
788 "cost_per_1m_out": 0.17748,
789 "cost_per_1m_in_cached": 0,
790 "cost_per_1m_out_cached": 0,
791 "context_window": 131000,
792 "default_max_tokens": 13100,
793 "can_reason": true,
794 "reasoning_levels": [
795 "low",
796 "medium",
797 "high"
798 ],
799 "default_reasoning_effort": "medium",
800 "supports_attachments": false
801 },
802 {
803 "id": "qwen3-30b-a3b-instruct-2507",
804 "name": "Qwen3 30B A3B Instruct 2507",
805 "cost_per_1m_in": 0.089,
806 "cost_per_1m_out": 0.268,
807 "cost_per_1m_in_cached": 0,
808 "cost_per_1m_out_cached": 0,
809 "context_window": 262000,
810 "default_max_tokens": 26200,
811 "can_reason": true,
812 "reasoning_levels": [
813 "low",
814 "medium",
815 "high"
816 ],
817 "default_reasoning_effort": "medium",
818 "supports_attachments": false
819 },
820 {
821 "id": "gpt-oss-20b",
822 "name": "GPT Oss 20b",
823 "cost_per_1m_in": 0.026622,
824 "cost_per_1m_out": 0.124236,
825 "cost_per_1m_in_cached": 0,
826 "cost_per_1m_out_cached": 0,
827 "context_window": 131000,
828 "default_max_tokens": 13100,
829 "can_reason": true,
830 "reasoning_levels": [
831 "low",
832 "medium",
833 "high"
834 ],
835 "default_reasoning_effort": "medium",
836 "supports_attachments": false
837 },
838 {
839 "id": "mistral-7b-instruct-v0.3",
840 "name": "Mistral 7B Instruct v0.3",
841 "cost_per_1m_in": 0.1,
842 "cost_per_1m_out": 0.1,
843 "cost_per_1m_in_cached": 0,
844 "cost_per_1m_out_cached": 0,
845 "context_window": 127000,
846 "default_max_tokens": 12700,
847 "can_reason": false,
848 "supports_attachments": false
849 },
850 {
851 "id": "mistral-small-3.2-24b-instruct-2506",
852 "name": "Mistral Small 3.2 24B Instruct 2506",
853 "cost_per_1m_in": 0.09,
854 "cost_per_1m_out": 0.28,
855 "cost_per_1m_in_cached": 0,
856 "cost_per_1m_out_cached": 0,
857 "context_window": 128000,
858 "default_max_tokens": 12800,
859 "can_reason": false,
860 "supports_attachments": true
861 },
862 {
863 "id": "mistral-large-2402",
864 "name": "Mistral Large 2402",
865 "cost_per_1m_in": 3.846,
866 "cost_per_1m_out": 11.627,
867 "cost_per_1m_in_cached": 0,
868 "cost_per_1m_out_cached": 0,
869 "context_window": 32000,
870 "default_max_tokens": 3200,
871 "can_reason": true,
872 "reasoning_levels": [
873 "low",
874 "medium",
875 "high"
876 ],
877 "default_reasoning_effort": "medium",
878 "supports_attachments": false
879 },
880 {
881 "id": "pixtral-large-2502",
882 "name": "Pixtral Large 25.02",
883 "cost_per_1m_in": 1.789,
884 "cost_per_1m_out": 5.366,
885 "cost_per_1m_in_cached": 0,
886 "cost_per_1m_out_cached": 0,
887 "context_window": 128000,
888 "default_max_tokens": 12800,
889 "can_reason": true,
890 "reasoning_levels": [
891 "low",
892 "medium",
893 "high"
894 ],
895 "default_reasoning_effort": "medium",
896 "supports_attachments": true
897 },
898 {
899 "id": "qwen3-235b-a22b-instruct-2507",
900 "name": "Qwen3 235B A22B Instruct 2507",
901 "cost_per_1m_in": 0.062118,
902 "cost_per_1m_out": 0.408204,
903 "cost_per_1m_in_cached": 0,
904 "cost_per_1m_out_cached": 0,
905 "context_window": 131000,
906 "default_max_tokens": 13100,
907 "can_reason": true,
908 "reasoning_levels": [
909 "low",
910 "medium",
911 "high"
912 ],
913 "default_reasoning_effort": "medium",
914 "supports_attachments": false
915 },
916 {
917 "id": "qwen3-coder-30b-a3b-instruct",
      "name": "Qwen3 Coder 30B A3B Instruct",
919 "cost_per_1m_in": 0.053244,
920 "cost_per_1m_out": 0.22185,
921 "cost_per_1m_in_cached": 0,
922 "cost_per_1m_out_cached": 0,
923 "context_window": 262000,
924 "default_max_tokens": 26200,
925 "can_reason": true,
926 "reasoning_levels": [
927 "low",
928 "medium",
929 "high"
930 ],
931 "default_reasoning_effort": "medium",
932 "supports_attachments": false
933 },
934 {
935 "id": "qwen3-32b",
936 "name": "Qwen3 32B",
937 "cost_per_1m_in": 0.089,
938 "cost_per_1m_out": 0.268,
939 "cost_per_1m_in_cached": 0,
940 "cost_per_1m_out_cached": 0,
941 "context_window": 40000,
942 "default_max_tokens": 4000,
943 "can_reason": true,
944 "reasoning_levels": [
945 "low",
946 "medium",
947 "high"
948 ],
949 "default_reasoning_effort": "medium",
950 "supports_attachments": false
951 },
952 {
953 "id": "nova-lite-v1",
954 "name": "Nova Lite 1.0",
955 "cost_per_1m_in": 0.062,
956 "cost_per_1m_out": 0.247,
957 "cost_per_1m_in_cached": 0,
958 "cost_per_1m_out_cached": 0,
959 "context_window": 300000,
960 "default_max_tokens": 30000,
961 "can_reason": true,
962 "reasoning_levels": [
963 "low",
964 "medium",
965 "high"
966 ],
967 "default_reasoning_effort": "medium",
968 "supports_attachments": true
969 },
970 {
971 "id": "claude-sonnet-4",
972 "name": "Claude Sonnet 4",
973 "cost_per_1m_in": 2.601,
974 "cost_per_1m_out": 13.01,
975 "cost_per_1m_in_cached": 0,
976 "cost_per_1m_out_cached": 0,
977 "context_window": 200000,
978 "default_max_tokens": 20000,
979 "can_reason": true,
980 "reasoning_levels": [
981 "low",
982 "medium",
983 "high"
984 ],
985 "default_reasoning_effort": "medium",
986 "supports_attachments": true
987 },
988 {
989 "id": "gpt-4.1-mini",
990 "name": "GPT 4.1 mini",
991 "cost_per_1m_in": 0.39,
992 "cost_per_1m_out": 1.53,
993 "cost_per_1m_in_cached": 0,
994 "cost_per_1m_out_cached": 0,
995 "context_window": 1047576,
996 "default_max_tokens": 104757,
997 "can_reason": true,
998 "reasoning_levels": [
999 "low",
1000 "medium",
1001 "high"
1002 ],
1003 "default_reasoning_effort": "medium",
1004 "supports_attachments": true
1005 },
1006 {
1007 "id": "gpt-4.1-nano",
1008 "name": "GPT 4.1 nano",
1009 "cost_per_1m_in": 0.1,
1010 "cost_per_1m_out": 0.39,
1011 "cost_per_1m_in_cached": 0,
1012 "cost_per_1m_out_cached": 0,
1013 "context_window": 1047576,
1014 "default_max_tokens": 104757,
1015 "can_reason": true,
1016 "reasoning_levels": [
1017 "low",
1018 "medium",
1019 "high"
1020 ],
1021 "default_reasoning_effort": "medium",
1022 "supports_attachments": true
1023 },
1024 {
1025 "id": "nova-micro-v1",
1026 "name": "Nova Micro 1.0",
1027 "cost_per_1m_in": 0.036,
1028 "cost_per_1m_out": 0.143,
1029 "cost_per_1m_in_cached": 0,
1030 "cost_per_1m_out_cached": 0,
1031 "context_window": 128000,
1032 "default_max_tokens": 12800,
1033 "can_reason": true,
1034 "reasoning_levels": [
1035 "low",
1036 "medium",
1037 "high"
1038 ],
1039 "default_reasoning_effort": "medium",
1040 "supports_attachments": true
1041 },
1042 {
1043 "id": "gpt-4.1",
1044 "name": "GPT 4.1",
1045 "cost_per_1m_in": 1.968,
1046 "cost_per_1m_out": 7.872,
1047 "cost_per_1m_in_cached": 0,
1048 "cost_per_1m_out_cached": 0,
1049 "context_window": 1047576,
1050 "default_max_tokens": 104757,
1051 "can_reason": true,
1052 "reasoning_levels": [
1053 "low",
1054 "medium",
1055 "high"
1056 ],
1057 "default_reasoning_effort": "medium",
1058 "supports_attachments": true
1059 },
1060 {
1061 "id": "nova-pro-v1",
1062 "name": "Nova Pro 1.0",
1063 "cost_per_1m_in": 0.824,
1064 "cost_per_1m_out": 3.295,
1065 "cost_per_1m_in_cached": 0,
1066 "cost_per_1m_out_cached": 0,
1067 "context_window": 300000,
1068 "default_max_tokens": 30000,
1069 "can_reason": true,
1070 "reasoning_levels": [
1071 "low",
1072 "medium",
1073 "high"
1074 ],
1075 "default_reasoning_effort": "medium",
1076 "supports_attachments": true
1077 },
1078 {
1079 "id": "llama-3.1-nemotron-ultra-253b-v1",
1080 "name": "Llama 3.1 Nemotron Ultra 253B v1",
1081 "cost_per_1m_in": 0.537,
1082 "cost_per_1m_out": 1.61,
1083 "cost_per_1m_in_cached": 0,
1084 "cost_per_1m_out_cached": 0,
1085 "context_window": 128000,
1086 "default_max_tokens": 12800,
1087 "can_reason": true,
1088 "reasoning_levels": [
1089 "low",
1090 "medium",
1091 "high"
1092 ],
1093 "default_reasoning_effort": "medium",
1094 "supports_attachments": false
1095 },
1096 {
1097 "id": "llama-4-maverick",
1098 "name": "Llama 4 Maverick",
1099 "cost_per_1m_in": 0.124236,
1100 "cost_per_1m_out": 0.602832,
1101 "cost_per_1m_in_cached": 0,
1102 "cost_per_1m_out_cached": 0,
1103 "context_window": 1050000,
1104 "default_max_tokens": 105000,
1105 "can_reason": false,
1106 "supports_attachments": false
1107 },
1108 {
1109 "id": "deepseek-v3-0324",
1110 "name": "DeepSeek V3 0324",
1111 "cost_per_1m_in": 0.26622,
1112 "cost_per_1m_out": 0.8874,
1113 "cost_per_1m_in_cached": 0,
1114 "cost_per_1m_out_cached": 0,
1115 "context_window": 163840,
1116 "default_max_tokens": 16384,
1117 "can_reason": true,
1118 "reasoning_levels": [
1119 "low",
1120 "medium",
1121 "high"
1122 ],
1123 "default_reasoning_effort": "medium",
1124 "supports_attachments": false
1125 },
1126 {
1127 "id": "mistral-small-2503",
1128 "name": "Mistral Small 2503",
1129 "cost_per_1m_in": 0.1,
1130 "cost_per_1m_out": 0.3,
1131 "cost_per_1m_in_cached": 0,
1132 "cost_per_1m_out_cached": 0,
1133 "context_window": 128000,
1134 "default_max_tokens": 12800,
1135 "can_reason": false,
1136 "supports_attachments": true
1137 },
1138 {
1139 "id": "mistral-small-2506",
1140 "name": "Mistral Small 2506",
1141 "cost_per_1m_in": 0.1,
1142 "cost_per_1m_out": 0.3,
1143 "cost_per_1m_in_cached": 0,
1144 "cost_per_1m_out_cached": 0,
1145 "context_window": 131072,
1146 "default_max_tokens": 13107,
1147 "can_reason": false,
1148 "supports_attachments": true
1149 },
1150 {
1151 "id": "gemini-2.0-flash-001",
1152 "name": "Gemini 2.0 Flash",
1153 "cost_per_1m_in": 0.13416,
1154 "cost_per_1m_out": 0.53664,
1155 "cost_per_1m_in_cached": 0,
1156 "cost_per_1m_out_cached": 0,
1157 "context_window": 1048576,
1158 "default_max_tokens": 104857,
1159 "can_reason": false,
1160 "supports_attachments": true
1161 },
1162 {
1163 "id": "gemini-2.0-flash-lite-001",
1164 "name": "Gemini 2.0 Flash Lite",
1165 "cost_per_1m_in": 0.06708,
1166 "cost_per_1m_out": 0.26832,
1167 "cost_per_1m_in_cached": 0,
1168 "cost_per_1m_out_cached": 0,
1169 "context_window": 1048576,
1170 "default_max_tokens": 104857,
1171 "can_reason": false,
1172 "supports_attachments": true
1173 },
1174 {
1175 "id": "gemini-2.5-flash",
1176 "name": "Gemini 2.5 Flash",
1177 "cost_per_1m_in": 0.26832,
1178 "cost_per_1m_out": 2.236,
1179 "cost_per_1m_in_cached": 0,
1180 "cost_per_1m_out_cached": 0,
1181 "context_window": 1048576,
1182 "default_max_tokens": 104857,
1183 "can_reason": true,
1184 "reasoning_levels": [
1185 "low",
1186 "medium",
1187 "high"
1188 ],
1189 "default_reasoning_effort": "medium",
1190 "supports_attachments": true
1191 },
1192 {
1193 "id": "gemini-2.5-pro",
1194 "name": "Gemini 2.5 Pro",
1195 "cost_per_1m_in": 1.3416,
1196 "cost_per_1m_out": 8.944,
1197 "cost_per_1m_in_cached": 0,
1198 "cost_per_1m_out_cached": 0,
1199 "context_window": 1048576,
1200 "default_max_tokens": 104857,
1201 "can_reason": true,
1202 "reasoning_levels": [
1203 "low",
1204 "medium",
1205 "high"
1206 ],
1207 "default_reasoning_effort": "medium",
1208 "supports_attachments": true
1209 },
1210 {
1211 "id": "gemma-3-27b-it",
1212 "name": "Gemma 3 27b it",
1213 "cost_per_1m_in": 0.089,
1214 "cost_per_1m_out": 0.268,
1215 "cost_per_1m_in_cached": 0,
1216 "cost_per_1m_out_cached": 0,
1217 "context_window": 131000,
1218 "default_max_tokens": 13100,
1219 "can_reason": true,
1220 "reasoning_levels": [
1221 "low",
1222 "medium",
1223 "high"
1224 ],
1225 "default_reasoning_effort": "medium",
1226 "supports_attachments": true
1227 },
1228 {
1229 "id": "deepseek-r1-0528",
1230 "name": "DeepSeek R1 0528",
1231 "cost_per_1m_in": 0.585084,
1232 "cost_per_1m_out": 2.30724,
1233 "cost_per_1m_in_cached": 0,
1234 "cost_per_1m_out_cached": 0,
1235 "context_window": 164000,
1236 "default_max_tokens": 16400,
1237 "can_reason": true,
1238 "reasoning_levels": [
1239 "low",
1240 "medium",
1241 "high"
1242 ],
1243 "default_reasoning_effort": "medium",
1244 "supports_attachments": false
1245 },
1246 {
1247 "id": "codestral-2508",
1248 "name": "Codestral 25.08",
1249 "cost_per_1m_in": 0.3,
1250 "cost_per_1m_out": 0.9,
1251 "cost_per_1m_in_cached": 0,
1252 "cost_per_1m_out_cached": 0,
1253 "context_window": 256000,
1254 "default_max_tokens": 25600,
1255 "can_reason": false,
1256 "supports_attachments": false
1257 },
1258 {
1259 "id": "llama-3.3-70b-instruct",
1260 "name": "Llama 3.3 70B Instruct",
1261 "cost_per_1m_in": 0.08874,
1262 "cost_per_1m_out": 0.274994,
1263 "cost_per_1m_in_cached": 0,
1264 "cost_per_1m_out_cached": 0,
1265 "context_window": 131000,
1266 "default_max_tokens": 13100,
1267 "can_reason": true,
1268 "reasoning_levels": [
1269 "low",
1270 "medium",
1271 "high"
1272 ],
1273 "default_reasoning_effort": "medium",
1274 "supports_attachments": false
1275 },
1276 {
1277 "id": "gpt-4o",
1278 "name": "GPT 4o",
1279 "cost_per_1m_in": 2.38664,
1280 "cost_per_1m_out": 9.5466,
1281 "cost_per_1m_in_cached": 0,
1282 "cost_per_1m_out_cached": 0,
1283 "context_window": 128000,
1284 "default_max_tokens": 12800,
1285 "can_reason": true,
1286 "reasoning_levels": [
1287 "low",
1288 "medium",
1289 "high"
1290 ],
1291 "default_reasoning_effort": "medium",
1292 "supports_attachments": true
1293 },
1294 {
1295 "id": "gpt-5-mini",
1296 "name": "GPT 5 mini",
1297 "cost_per_1m_in": 0.25,
1298 "cost_per_1m_out": 1.968,
1299 "cost_per_1m_in_cached": 0,
1300 "cost_per_1m_out_cached": 0,
1301 "context_window": 400000,
1302 "default_max_tokens": 40000,
1303 "can_reason": true,
1304 "reasoning_levels": [
1305 "low",
1306 "medium",
1307 "high"
1308 ],
1309 "default_reasoning_effort": "medium",
1310 "supports_attachments": true
1311 },
1312 {
1313 "id": "gpt-5-nano",
1314 "name": "GPT 5 nano",
1315 "cost_per_1m_in": 0.054,
1316 "cost_per_1m_out": 0.394,
1317 "cost_per_1m_in_cached": 0,
1318 "cost_per_1m_out_cached": 0,
1319 "context_window": 400000,
1320 "default_max_tokens": 40000,
1321 "can_reason": true,
1322 "reasoning_levels": [
1323 "low",
1324 "medium",
1325 "high"
1326 ],
1327 "default_reasoning_effort": "medium",
1328 "supports_attachments": true
1329 },
1330 {
1331 "id": "mistral-large-2411",
1332 "name": "Mistral Large 2411",
1333 "cost_per_1m_in": 1.8,
1334 "cost_per_1m_out": 5.4,
1335 "cost_per_1m_in_cached": 0,
1336 "cost_per_1m_out_cached": 0,
1337 "context_window": 131072,
1338 "default_max_tokens": 13107,
1339 "can_reason": true,
1340 "reasoning_levels": [
1341 "low",
1342 "medium",
1343 "high"
1344 ],
1345 "default_reasoning_effort": "medium",
1346 "supports_attachments": false
1347 },
1348 {
1349 "id": "hermes-4-405b",
1350 "name": "Hermes 4 405B",
1351 "cost_per_1m_in": 0.894,
1352 "cost_per_1m_out": 2.683,
1353 "cost_per_1m_in_cached": 0,
1354 "cost_per_1m_out_cached": 0,
1355 "context_window": 128000,
1356 "default_max_tokens": 12800,
1357 "can_reason": false,
1358 "supports_attachments": false
1359 },
1360 {
1361 "id": "mistral-medium-2508",
1362 "name": "Mistral Medium 2508",
1363 "cost_per_1m_in": 0.4,
1364 "cost_per_1m_out": 2,
1365 "cost_per_1m_in_cached": 0,
1366 "cost_per_1m_out_cached": 0,
1367 "context_window": 128000,
1368 "default_max_tokens": 12800,
1369 "can_reason": true,
1370 "reasoning_levels": [
1371 "low",
1372 "medium",
1373 "high"
1374 ],
1375 "default_reasoning_effort": "medium",
1376 "supports_attachments": true
1377 },
1378 {
1379 "id": "devstral-medium-2507",
1380 "name": "Devstral Medium 2507",
1381 "cost_per_1m_in": 0.4,
1382 "cost_per_1m_out": 2,
1383 "cost_per_1m_in_cached": 0,
1384 "cost_per_1m_out_cached": 0,
1385 "context_window": 131072,
1386 "default_max_tokens": 13107,
1387 "can_reason": false,
1388 "supports_attachments": false
1389 },
1390 {
1391 "id": "mistral-nemo-instruct-2407",
1392 "name": "Mistral Nemo 2407",
1393 "cost_per_1m_in": 0.13,
1394 "cost_per_1m_out": 0.13,
1395 "cost_per_1m_in_cached": 0,
1396 "cost_per_1m_out_cached": 0,
1397 "context_window": 131072,
1398 "default_max_tokens": 13107,
1399 "can_reason": false,
1400 "supports_attachments": false
1401 },
1402 {
1403 "id": "devstral-small-2507",
1404 "name": "Devstral Small 2507",
1405 "cost_per_1m_in": 0.1,
1406 "cost_per_1m_out": 0.3,
1407 "cost_per_1m_in_cached": 0,
1408 "cost_per_1m_out_cached": 0,
1409 "context_window": 131072,
1410 "default_max_tokens": 13107,
1411 "can_reason": false,
1412 "supports_attachments": false
1413 },
1414 {
1415 "id": "llama-3.1-405b-instruct",
1416 "name": "Llama 3.1 405B Instruct",
1417 "cost_per_1m_in": 1.75,
1418 "cost_per_1m_out": 1.75,
1419 "cost_per_1m_in_cached": 0,
1420 "cost_per_1m_out_cached": 0,
1421 "context_window": 128000,
1422 "default_max_tokens": 12800,
1423 "can_reason": true,
1424 "reasoning_levels": [
1425 "low",
1426 "medium",
1427 "high"
1428 ],
1429 "default_reasoning_effort": "medium",
1430 "supports_attachments": false
1431 },
1432 {
1433 "id": "gpt-4o-mini",
1434 "name": "GPT 4o mini",
1435 "cost_per_1m_in": 0.1432,
1436 "cost_per_1m_out": 0.5728,
1437 "cost_per_1m_in_cached": 0,
1438 "cost_per_1m_out_cached": 0,
1439 "context_window": 128000,
1440 "default_max_tokens": 12800,
1441 "can_reason": true,
1442 "reasoning_levels": [
1443 "low",
1444 "medium",
1445 "high"
1446 ],
1447 "default_reasoning_effort": "medium",
1448 "supports_attachments": true
1449 },
1450 {
1451 "id": "llama-3.1-8b-instruct",
1452 "name": "Llama 3.1 8B Instruct",
1453 "cost_per_1m_in": 0.018,
1454 "cost_per_1m_out": 0.054,
1455 "cost_per_1m_in_cached": 0,
1456 "cost_per_1m_out_cached": 0,
1457 "context_window": 128000,
1458 "default_max_tokens": 12800,
1459 "can_reason": true,
1460 "reasoning_levels": [
1461 "low",
1462 "medium",
1463 "high"
1464 ],
1465 "default_reasoning_effort": "medium",
1466 "supports_attachments": false
1467 }
1468 ]
1469}