1{
2 "name": "Cortecs",
3 "id": "cortecs",
4 "api_key": "$CORTECS_API_KEY",
5 "api_endpoint": "https://api.cortecs.ai/v1",
6 "type": "openai",
7 "default_large_model_id": "qwen3-coder-30b-a3b-instruct",
8 "default_small_model_id": "glm-4.7-flash",
9 "models": [
10 {
11 "id": "deepseek-v4-pro",
12 "name": "DeepSeek V4 Pro",
13 "cost_per_1m_in": 1.55295,
14 "cost_per_1m_out": 3.1059,
15 "cost_per_1m_in_cached": 0,
16 "cost_per_1m_out_cached": 0,
17 "context_window": 1048576,
18 "default_max_tokens": 104857,
19 "can_reason": true,
20 "reasoning_levels": [
21 "low",
22 "medium",
23 "high"
24 ],
25 "default_reasoning_effort": "medium",
26 "supports_attachments": false
27 },
28 {
29 "id": "deepseek-v4-flash",
30 "name": "DeepSeek V4 Flash",
31 "cost_per_1m_in": 0.13311,
32 "cost_per_1m_out": 0.26622,
33 "cost_per_1m_in_cached": 0,
34 "cost_per_1m_out_cached": 0,
35 "context_window": 1048576,
36 "default_max_tokens": 104857,
37 "can_reason": true,
38 "reasoning_levels": [
39 "low",
40 "medium",
41 "high"
42 ],
43 "default_reasoning_effort": "medium",
44 "supports_attachments": false
45 },
46 {
47 "id": "mistral-medium-3.5",
48 "name": "Mistral Medium 3.5",
49 "cost_per_1m_in": 1.25,
50 "cost_per_1m_out": 6.4,
51 "cost_per_1m_in_cached": 0,
52 "cost_per_1m_out_cached": 0,
53 "context_window": 256000,
54 "default_max_tokens": 25600,
55 "can_reason": true,
56 "reasoning_levels": [
57 "low",
58 "medium",
59 "high"
60 ],
61 "default_reasoning_effort": "medium",
62 "supports_attachments": true
63 },
64 {
65 "id": "nvidia-nemotron-3-nano-omni",
66 "name": "Nemotron 3 Nano Omni",
67 "cost_per_1m_in": 0.0532447,
68 "cost_per_1m_out": 0.212976,
69 "cost_per_1m_in_cached": 0,
70 "cost_per_1m_out_cached": 0,
71 "context_window": 300000,
72 "default_max_tokens": 30000,
73 "can_reason": true,
74 "reasoning_levels": [
75 "low",
76 "medium",
77 "high"
78 ],
79 "default_reasoning_effort": "medium",
80 "supports_attachments": false
81 },
82 {
83 "id": "gpt-5.4",
84 "name": "GPT 5.4",
85 "cost_per_1m_in": 2.601,
86 "cost_per_1m_out": 13.872,
87 "cost_per_1m_in_cached": 0,
88 "cost_per_1m_out_cached": 0,
89 "context_window": 1050000,
90 "default_max_tokens": 105000,
91 "can_reason": true,
92 "reasoning_levels": [
93 "low",
94 "medium",
95 "high"
96 ],
97 "default_reasoning_effort": "medium",
98 "supports_attachments": true
99 },
100 {
101 "id": "kimi-k2.6",
102 "name": "Kimi K2.6",
103 "cost_per_1m_in": 0.6936,
104 "cost_per_1m_out": 3.0345,
105 "cost_per_1m_in_cached": 0,
106 "cost_per_1m_out_cached": 0,
107 "context_window": 256000,
108 "default_max_tokens": 25600,
109 "can_reason": true,
110 "reasoning_levels": [
111 "low",
112 "medium",
113 "high"
114 ],
115 "default_reasoning_effort": "medium",
116 "supports_attachments": true
117 },
118 {
119 "id": "claude-opus4-7",
120 "name": "Claude Opus 4.7",
121 "cost_per_1m_in": 4.7685,
122 "cost_per_1m_out": 23.8425,
123 "cost_per_1m_in_cached": 0,
124 "cost_per_1m_out_cached": 0,
125 "context_window": 1000000,
126 "default_max_tokens": 100000,
127 "can_reason": true,
128 "reasoning_levels": [
129 "low",
130 "medium",
131 "high"
132 ],
133 "default_reasoning_effort": "medium",
134 "supports_attachments": true
135 },
136 {
137 "id": "minimax-m2.7",
138 "name": "MiniMax M2.7",
139 "cost_per_1m_in": 0.26622,
140 "cost_per_1m_out": 1.06488,
141 "cost_per_1m_in_cached": 0,
142 "cost_per_1m_out_cached": 0,
143 "context_window": 196608,
144 "default_max_tokens": 19660,
145 "can_reason": true,
146 "reasoning_levels": [
147 "low",
148 "medium",
149 "high"
150 ],
151 "default_reasoning_effort": "medium",
152 "supports_attachments": false
153 },
154 {
155 "id": "glm-5.1",
156 "name": "GLM 5.1",
157 "cost_per_1m_in": 1.24236,
158 "cost_per_1m_out": 3.90336,
159 "cost_per_1m_in_cached": 0,
160 "cost_per_1m_out_cached": 0,
161 "context_window": 202752,
162 "default_max_tokens": 20275,
163 "can_reason": true,
164 "reasoning_levels": [
165 "low",
166 "medium",
167 "high"
168 ],
169 "default_reasoning_effort": "medium",
170 "supports_attachments": false
171 },
172 {
173 "id": "qwen3.5-122b-a10b",
174 "name": "Qwen3.5 122B A10B",
175 "cost_per_1m_in": 0.4437,
176 "cost_per_1m_out": 3.1059,
177 "cost_per_1m_in_cached": 0,
178 "cost_per_1m_out_cached": 0,
179 "context_window": 262144,
180 "default_max_tokens": 26214,
181 "can_reason": true,
182 "reasoning_levels": [
183 "low",
184 "medium",
185 "high"
186 ],
187 "default_reasoning_effort": "medium",
188 "supports_attachments": false
189 },
190 {
191 "id": "qwen3.5-9b",
192 "name": "Qwen3.5 9B",
193 "cost_per_1m_in": 0.1,
194 "cost_per_1m_out": 0.15,
195 "cost_per_1m_in_cached": 0,
196 "cost_per_1m_out_cached": 0,
197 "context_window": 262000,
198 "default_max_tokens": 26200,
199 "can_reason": true,
200 "reasoning_levels": [
201 "low",
202 "medium",
203 "high"
204 ],
205 "default_reasoning_effort": "medium",
206 "supports_attachments": false
207 },
208 {
209 "id": "nemotron-3-super-120b-a12b",
210 "name": "Nemotron 3 Super 120B A12B",
211 "cost_per_1m_in": 0.15606,
212 "cost_per_1m_out": 0.67626,
213 "cost_per_1m_in_cached": 0,
214 "cost_per_1m_out_cached": 0,
215 "context_window": 262000,
216 "default_max_tokens": 26214,
217 "can_reason": true,
218 "reasoning_levels": [
219 "low",
220 "medium",
221 "high"
222 ],
223 "default_reasoning_effort": "medium",
224 "supports_attachments": false
225 },
226 {
227 "id": "qwen3-coder-next",
228 "name": "Qwen3 Coder Next",
229 "cost_per_1m_in": 0.15,
230 "cost_per_1m_out": 0.8,
231 "cost_per_1m_in_cached": 0,
232 "cost_per_1m_out_cached": 0,
233 "context_window": 256000,
234 "default_max_tokens": 25600,
235 "can_reason": true,
236 "reasoning_levels": [
237 "low",
238 "medium",
239 "high"
240 ],
241 "default_reasoning_effort": "medium",
242 "supports_attachments": false
243 },
244 {
245 "id": "glm-5",
246 "name": "GLM 5",
247 "cost_per_1m_in": 0.8874,
248 "cost_per_1m_out": 2.83968,
249 "cost_per_1m_in_cached": 0,
250 "cost_per_1m_out_cached": 0,
251 "context_window": 202752,
252 "default_max_tokens": 20275,
253 "can_reason": true,
254 "reasoning_levels": [
255 "low",
256 "medium",
257 "high"
258 ],
259 "default_reasoning_effort": "medium",
260 "supports_attachments": false
261 },
262 {
263 "id": "glm-4.6",
264 "name": "GLM 4.6",
265 "cost_per_1m_in": 0.35496,
266 "cost_per_1m_out": 1.55295,
267 "cost_per_1m_in_cached": 0,
268 "cost_per_1m_out_cached": 0,
269 "context_window": 203000,
270 "default_max_tokens": 20300,
271 "can_reason": true,
272 "reasoning_levels": [
273 "low",
274 "medium",
275 "high"
276 ],
277 "default_reasoning_effort": "medium",
278 "supports_attachments": false
279 },
280 {
281 "id": "deepseek-chat-v3.1",
282 "name": "DeepSeek Chat V3.1",
283 "cost_per_1m_in": 0.17748,
284 "cost_per_1m_out": 0.70992,
285 "cost_per_1m_in_cached": 0,
286 "cost_per_1m_out_cached": 0,
287 "context_window": 164000,
288 "default_max_tokens": 16400,
289 "can_reason": true,
290 "reasoning_levels": [
291 "low",
292 "medium",
293 "high"
294 ],
295 "default_reasoning_effort": "medium",
296 "supports_attachments": false
297 },
298 {
299 "id": "qwen-2.5-72b-instruct",
300 "name": "Qwen2.5 72B Instruct",
301 "cost_per_1m_in": 0.062118,
302 "cost_per_1m_out": 0.230724,
303 "cost_per_1m_in_cached": 0,
304 "cost_per_1m_out_cached": 0,
305 "context_window": 33000,
306 "default_max_tokens": 3300,
307 "can_reason": false,
308 "supports_attachments": false
309 },
310 {
311 "id": "qwen3.5-397b-a17b",
312 "name": "Qwen3.5 397B A17B",
313 "cost_per_1m_in": 0.53244,
314 "cost_per_1m_out": 3.19464,
315 "cost_per_1m_in_cached": 0,
316 "cost_per_1m_out_cached": 0,
317 "context_window": 262000,
318 "default_max_tokens": 25000,
319 "can_reason": true,
320 "reasoning_levels": [
321 "low",
322 "medium",
323 "high"
324 ],
325 "default_reasoning_effort": "medium",
326 "supports_attachments": false
327 },
328 {
329 "id": "deepseek-v3.2",
330 "name": "DeepSeek V3.2",
331 "cost_per_1m_in": 0.26622,
332 "cost_per_1m_out": 0.4437,
333 "cost_per_1m_in_cached": 0,
334 "cost_per_1m_out_cached": 0,
335 "context_window": 163840,
336 "default_max_tokens": 16384,
337 "can_reason": true,
338 "reasoning_levels": [
339 "low",
340 "medium",
341 "high"
342 ],
343 "default_reasoning_effort": "medium",
344 "supports_attachments": false
345 },
346 {
347 "id": "mistral-small-2603",
348 "name": "Mistral Small 4 2603",
349 "cost_per_1m_in": 0.1275,
350 "cost_per_1m_out": 0.51,
351 "cost_per_1m_in_cached": 0,
352 "cost_per_1m_out_cached": 0,
353 "context_window": 256000,
354 "default_max_tokens": 25600,
355 "can_reason": true,
356 "reasoning_levels": [
357 "low",
358 "medium",
359 "high"
360 ],
361 "default_reasoning_effort": "medium",
362 "supports_attachments": true
363 },
364 {
365 "id": "minimax-m2.5",
366 "name": "MiniMax M2.5",
367 "cost_per_1m_in": 0.26622,
368 "cost_per_1m_out": 0.97614,
369 "cost_per_1m_in_cached": 0,
370 "cost_per_1m_out_cached": 0,
371 "context_window": 196608,
372 "default_max_tokens": 19660,
373 "can_reason": true,
374 "reasoning_levels": [
375 "low",
376 "medium",
377 "high"
378 ],
379 "default_reasoning_effort": "medium",
380 "supports_attachments": false
381 },
382 {
383 "id": "claude-4-6-sonnet",
384 "name": "Claude Sonnet 4.6",
385 "cost_per_1m_in": 2.8691,
386 "cost_per_1m_out": 14.3095,
387 "cost_per_1m_in_cached": 0,
388 "cost_per_1m_out_cached": 0,
389 "context_window": 1000000,
390 "default_max_tokens": 100000,
391 "can_reason": true,
392 "reasoning_levels": [
393 "low",
394 "medium",
395 "high"
396 ],
397 "default_reasoning_effort": "medium",
398 "supports_attachments": true
399 },
400 {
401 "id": "glm-4.7-flash",
402 "name": "GLM 4.7 Flash",
403 "cost_per_1m_in": 0.0716,
404 "cost_per_1m_out": 0.4293,
405 "cost_per_1m_in_cached": 0,
406 "cost_per_1m_out_cached": 0,
407 "context_window": 203000,
408 "default_max_tokens": 20300,
409 "can_reason": false,
410 "supports_attachments": false
411 },
412 {
413 "id": "kimi-k2.5",
414 "name": "Kimi K2.5",
415 "cost_per_1m_in": 0.4437,
416 "cost_per_1m_out": 2.12976,
417 "cost_per_1m_in_cached": 0,
418 "cost_per_1m_out_cached": 0,
419 "context_window": 256000,
420 "default_max_tokens": 25600,
421 "can_reason": true,
422 "reasoning_levels": [
423 "low",
424 "medium",
425 "high"
426 ],
427 "default_reasoning_effort": "medium",
428 "supports_attachments": true
429 },
430 {
431 "id": "claude-opus4-6",
432 "name": "Claude Opus 4.6",
433 "cost_per_1m_in": 4.7685,
434 "cost_per_1m_out": 23.8425,
435 "cost_per_1m_in_cached": 0,
436 "cost_per_1m_out_cached": 0,
437 "context_window": 1000000,
438 "default_max_tokens": 100000,
439 "can_reason": true,
440 "reasoning_levels": [
441 "low",
442 "medium",
443 "high"
444 ],
445 "default_reasoning_effort": "medium",
446 "supports_attachments": true
447 },
448 {
449 "id": "minimax-m2",
450 "name": "MiniMax M2",
451 "cost_per_1m_in": 0.22185,
452 "cost_per_1m_out": 0.8874,
453 "cost_per_1m_in_cached": 0,
454 "cost_per_1m_out_cached": 0,
455 "context_window": 196608,
456 "default_max_tokens": 19660,
457 "can_reason": true,
458 "reasoning_levels": [
459 "low",
460 "medium",
461 "high"
462 ],
463 "default_reasoning_effort": "medium",
464 "supports_attachments": false
465 },
466 {
467 "id": "glm-4.7",
468 "name": "GLM 4.7",
469 "cost_per_1m_in": 0.53244,
470 "cost_per_1m_out": 1.95228,
471 "cost_per_1m_in_cached": 0,
472 "cost_per_1m_out_cached": 0,
473 "context_window": 202752,
474 "default_max_tokens": 20275,
475 "can_reason": true,
476 "reasoning_levels": [
477 "low",
478 "medium",
479 "high"
480 ],
481 "default_reasoning_effort": "medium",
482 "supports_attachments": false
483 },
484 {
485 "id": "minimax-m2.1",
486 "name": "MiniMax M2.1",
487 "cost_per_1m_in": 0.322,
488 "cost_per_1m_out": 1.2879,
489 "cost_per_1m_in_cached": 0,
490 "cost_per_1m_out_cached": 0,
491 "context_window": 196000,
492 "default_max_tokens": 19600,
493 "can_reason": true,
494 "reasoning_levels": [
495 "low",
496 "medium",
497 "high"
498 ],
499 "default_reasoning_effort": "medium",
500 "supports_attachments": false
501 },
502 {
503 "id": "qwen3-vl-235b-a22b",
504 "name": "Qwen3 VL 235B A22B",
505 "cost_per_1m_in": 0.186354,
506 "cost_per_1m_out": 1.68606,
507 "cost_per_1m_in_cached": 0,
508 "cost_per_1m_out_cached": 0,
509 "context_window": 131000,
510 "default_max_tokens": 13100,
511 "can_reason": true,
512 "reasoning_levels": [
513 "low",
514 "medium",
515 "high"
516 ],
517 "default_reasoning_effort": "medium",
518 "supports_attachments": true
519 },
520 {
521 "id": "nvidia-nemotron-3-nano-30b-a3b",
522 "name": "Nemotron 3 Nano 30B A3B",
523 "cost_per_1m_in": 0.0537,
524 "cost_per_1m_out": 0.215,
525 "cost_per_1m_in_cached": 0,
526 "cost_per_1m_out_cached": 0,
527 "context_window": 128000,
528 "default_max_tokens": 12800,
529 "can_reason": true,
530 "reasoning_levels": [
531 "low",
532 "medium",
533 "high"
534 ],
535 "default_reasoning_effort": "medium",
536 "supports_attachments": false
537 },
538 {
539 "id": "claude-opus4-5",
540 "name": "Claude Opus 4.5",
541 "cost_per_1m_in": 4.7695,
542 "cost_per_1m_out": 23.8485,
543 "cost_per_1m_in_cached": 0,
544 "cost_per_1m_out_cached": 0,
545 "context_window": 200000,
546 "default_max_tokens": 20000,
547 "can_reason": true,
548 "reasoning_levels": [
549 "low",
550 "medium",
551 "high"
552 ],
553 "default_reasoning_effort": "medium",
554 "supports_attachments": true
555 },
556 {
557 "id": "qwen3-next-80b-a3b-thinking",
558 "name": "Qwen3 Next 80B A3B Thinking",
559 "cost_per_1m_in": 0.13311,
560 "cost_per_1m_out": 1.06488,
561 "cost_per_1m_in_cached": 0,
562 "cost_per_1m_out_cached": 0,
563 "context_window": 262000,
564 "default_max_tokens": 26200,
565 "can_reason": true,
566 "reasoning_levels": [
567 "low",
568 "medium",
569 "high"
570 ],
571 "default_reasoning_effort": "medium",
572 "supports_attachments": false
573 },
574 {
575 "id": "holo2-30b-a3b",
576 "name": "Holo2 30B A3B",
577 "cost_per_1m_in": 0.3,
578 "cost_per_1m_out": 0.7,
579 "cost_per_1m_in_cached": 0,
580 "cost_per_1m_out_cached": 0,
581 "context_window": 22000,
582 "default_max_tokens": 2200,
583 "can_reason": true,
584 "reasoning_levels": [
585 "low",
586 "medium",
587 "high"
588 ],
589 "default_reasoning_effort": "medium",
590 "supports_attachments": true
591 },
592 {
593 "id": "devstral-2512",
594 "name": "Devstral 2 2512",
595 "cost_per_1m_in": 0.4,
596 "cost_per_1m_out": 2,
597 "cost_per_1m_in_cached": 0,
598 "cost_per_1m_out_cached": 0,
599 "context_window": 262000,
600 "default_max_tokens": 26200,
601 "can_reason": false,
602 "supports_attachments": false
603 },
604 {
605 "id": "nova-2-lite",
606 "name": "Nova 2 Lite",
607 "cost_per_1m_in": 0.335,
608 "cost_per_1m_out": 2.822,
609 "cost_per_1m_in_cached": 0,
610 "cost_per_1m_out_cached": 0,
611 "context_window": 1000000,
612 "default_max_tokens": 100000,
613 "can_reason": true,
614 "reasoning_levels": [
615 "low",
616 "medium",
617 "high"
618 ],
619 "default_reasoning_effort": "medium",
620 "supports_attachments": true
621 },
622 {
623 "id": "gpt-oss-safeguard-120b",
624 "name": "GPT OSS Safeguard 120B",
625 "cost_per_1m_in": 0.161,
626 "cost_per_1m_out": 0.626,
627 "cost_per_1m_in_cached": 0,
628 "cost_per_1m_out_cached": 0,
629 "context_window": 128000,
630 "default_max_tokens": 12800,
631 "can_reason": true,
632 "reasoning_levels": [
633 "low",
634 "medium",
635 "high"
636 ],
637 "default_reasoning_effort": "medium",
638 "supports_attachments": false
639 },
640 {
641 "id": "mistral-large-2512",
642 "name": "Mistral Large 3 2512",
643 "cost_per_1m_in": 0.5,
644 "cost_per_1m_out": 1.5,
645 "cost_per_1m_in_cached": 0,
646 "cost_per_1m_out_cached": 0,
647 "context_window": 256000,
648 "default_max_tokens": 25600,
649 "can_reason": false,
650 "supports_attachments": true
651 },
652 {
653 "id": "ministral-8b-2512",
654 "name": "Ministral 3 8b 2512",
655 "cost_per_1m_in": 0.15,
656 "cost_per_1m_out": 0.15,
657 "cost_per_1m_in_cached": 0,
658 "cost_per_1m_out_cached": 0,
659 "context_window": 256000,
660 "default_max_tokens": 25600,
661 "can_reason": false,
662 "supports_attachments": true
663 },
664 {
665 "id": "ministral-3b-2512",
666 "name": "Ministral 3 3b 2512",
667 "cost_per_1m_in": 0.1,
668 "cost_per_1m_out": 0.1,
669 "cost_per_1m_in_cached": 0,
670 "cost_per_1m_out_cached": 0,
671 "context_window": 256000,
672 "default_max_tokens": 25600,
673 "can_reason": false,
674 "supports_attachments": true
675 },
676 {
677 "id": "ministral-14b-2512",
678 "name": "Ministral 3 14b 2512",
679 "cost_per_1m_in": 0.2,
680 "cost_per_1m_out": 0.2,
681 "cost_per_1m_in_cached": 0,
682 "cost_per_1m_out_cached": 0,
683 "context_window": 256000,
684 "default_max_tokens": 25600,
685 "can_reason": false,
686 "supports_attachments": true
687 },
688 {
689 "id": "intellect-3",
690 "name": "INTELLECT-3",
691 "cost_per_1m_in": 0.179,
692 "cost_per_1m_out": 0.984,
693 "cost_per_1m_in_cached": 0,
694 "cost_per_1m_out_cached": 0,
695 "context_window": 128000,
696 "default_max_tokens": 12800,
697 "can_reason": true,
698 "reasoning_levels": [
699 "low",
700 "medium",
701 "high"
702 ],
703 "default_reasoning_effort": "medium",
704 "supports_attachments": false
705 },
706 {
707 "id": "gpt-5.1",
708 "name": "GPT 5.1",
709 "cost_per_1m_in": 1.234,
710 "cost_per_1m_out": 9.838,
711 "cost_per_1m_in_cached": 0,
712 "cost_per_1m_out_cached": 0,
713 "context_window": 400000,
714 "default_max_tokens": 40000,
715 "can_reason": true,
716 "reasoning_levels": [
717 "low",
718 "medium",
719 "high"
720 ],
721 "default_reasoning_effort": "medium",
722 "supports_attachments": true
723 },
724 {
725 "id": "nemotron-nano-v2-12b",
726 "name": "Nemotron Nano V2 12b",
727 "cost_per_1m_in": 0.215,
728 "cost_per_1m_out": 0.635,
729 "cost_per_1m_in_cached": 0,
730 "cost_per_1m_out_cached": 0,
731 "context_window": 128000,
732 "default_max_tokens": 12800,
733 "can_reason": true,
734 "reasoning_levels": [
735 "low",
736 "medium",
737 "high"
738 ],
739 "default_reasoning_effort": "medium",
740 "supports_attachments": true
741 },
742 {
743 "id": "claude-haiku-4-5",
744 "name": "Claude Haiku 4.5",
745 "cost_per_1m_in": 0.894,
746 "cost_per_1m_out": 4.472,
747 "cost_per_1m_in_cached": 0,
748 "cost_per_1m_out_cached": 0,
749 "context_window": 200000,
750 "default_max_tokens": 20000,
751 "can_reason": true,
752 "reasoning_levels": [
753 "low",
754 "medium",
755 "high"
756 ],
757 "default_reasoning_effort": "medium",
758 "supports_attachments": true
759 },
760 {
761 "id": "claude-4-5-sonnet",
762 "name": "Claude 4.5 Sonnet",
763 "cost_per_1m_in": 2.683,
764 "cost_per_1m_out": 13.416,
765 "cost_per_1m_in_cached": 0,
766 "cost_per_1m_out_cached": 0,
767 "context_window": 200000,
768 "default_max_tokens": 20000,
769 "can_reason": true,
770 "reasoning_levels": [
771 "low",
772 "medium",
773 "high"
774 ],
775 "default_reasoning_effort": "medium",
776 "supports_attachments": true
777 },
778 {
779 "id": "magistral-small-2509",
780 "name": "Magistral Small 2509",
781 "cost_per_1m_in": 0.5,
782 "cost_per_1m_out": 1.5,
783 "cost_per_1m_in_cached": 0,
784 "cost_per_1m_out_cached": 0,
785 "context_window": 128000,
786 "default_max_tokens": 12800,
787 "can_reason": true,
788 "reasoning_levels": [
789 "low",
790 "medium",
791 "high"
792 ],
793 "default_reasoning_effort": "medium",
794 "supports_attachments": true
795 },
796 {
797 "id": "magistral-medium-2509",
798 "name": "Magistral Medium 2509",
799 "cost_per_1m_in": 2,
800 "cost_per_1m_out": 5,
801 "cost_per_1m_in_cached": 0,
802 "cost_per_1m_out_cached": 0,
803 "context_window": 128000,
804 "default_max_tokens": 12800,
805 "can_reason": true,
806 "reasoning_levels": [
807 "low",
808 "medium",
809 "high"
810 ],
811 "default_reasoning_effort": "medium",
812 "supports_attachments": true
813 },
814 {
815 "id": "hermes-4-70b",
816 "name": "Hermes 4 70B",
817 "cost_per_1m_in": 0.116,
818 "cost_per_1m_out": 0.358,
819 "cost_per_1m_in_cached": 0,
820 "cost_per_1m_out_cached": 0,
821 "context_window": 128000,
822 "default_max_tokens": 12800,
823 "can_reason": false,
824 "supports_attachments": false
825 },
826 {
827 "id": "gpt-5",
828 "name": "GPT 5",
829 "cost_per_1m_in": 1.234,
830 "cost_per_1m_out": 9.838,
831 "cost_per_1m_in_cached": 0,
832 "cost_per_1m_out_cached": 0,
833 "context_window": 400000,
834 "default_max_tokens": 40000,
835 "can_reason": true,
836 "reasoning_levels": [
837 "low",
838 "medium",
839 "high"
840 ],
841 "default_reasoning_effort": "medium",
842 "supports_attachments": true
843 },
844 {
845 "id": "gpt-oss-120b",
846 "name": "GPT OSS 120B",
847 "cost_per_1m_in": 0.035496,
848 "cost_per_1m_out": 0.17748,
849 "cost_per_1m_in_cached": 0,
850 "cost_per_1m_out_cached": 0,
851 "context_window": 131000,
852 "default_max_tokens": 13100,
853 "can_reason": true,
854 "reasoning_levels": [
855 "low",
856 "medium",
857 "high"
858 ],
859 "default_reasoning_effort": "medium",
860 "supports_attachments": false
861 },
862 {
863 "id": "qwen3-30b-a3b-instruct-2507",
864 "name": "Qwen3 30B A3B Instruct 2507",
865 "cost_per_1m_in": 0.089,
866 "cost_per_1m_out": 0.268,
867 "cost_per_1m_in_cached": 0,
868 "cost_per_1m_out_cached": 0,
869 "context_window": 262000,
870 "default_max_tokens": 26200,
871 "can_reason": true,
872 "reasoning_levels": [
873 "low",
874 "medium",
875 "high"
876 ],
877 "default_reasoning_effort": "medium",
878 "supports_attachments": false
879 },
880 {
881 "id": "gpt-oss-20b",
882 "name": "GPT OSS 20B",
883 "cost_per_1m_in": 0.026622,
884 "cost_per_1m_out": 0.124236,
885 "cost_per_1m_in_cached": 0,
886 "cost_per_1m_out_cached": 0,
887 "context_window": 131000,
888 "default_max_tokens": 13100,
889 "can_reason": true,
890 "reasoning_levels": [
891 "low",
892 "medium",
893 "high"
894 ],
895 "default_reasoning_effort": "medium",
896 "supports_attachments": false
897 },
898 {
899 "id": "mistral-7b-instruct-v0.3",
900 "name": "Mistral 7B Instruct v0.3",
901 "cost_per_1m_in": 0.1,
902 "cost_per_1m_out": 0.1,
903 "cost_per_1m_in_cached": 0,
904 "cost_per_1m_out_cached": 0,
905 "context_window": 127000,
906 "default_max_tokens": 12700,
907 "can_reason": false,
908 "supports_attachments": false
909 },
910 {
911 "id": "mistral-small-3.2-24b-instruct-2506",
912 "name": "Mistral Small 3.2 24B Instruct 2506",
913 "cost_per_1m_in": 0.09,
914 "cost_per_1m_out": 0.28,
915 "cost_per_1m_in_cached": 0,
916 "cost_per_1m_out_cached": 0,
917 "context_window": 128000,
918 "default_max_tokens": 12800,
919 "can_reason": false,
920 "supports_attachments": true
921 },
922 {
923 "id": "mistral-large-2402",
924 "name": "Mistral Large 2402",
925 "cost_per_1m_in": 3.846,
926 "cost_per_1m_out": 11.627,
927 "cost_per_1m_in_cached": 0,
928 "cost_per_1m_out_cached": 0,
929 "context_window": 32000,
930 "default_max_tokens": 3200,
931 "can_reason": true,
932 "reasoning_levels": [
933 "low",
934 "medium",
935 "high"
936 ],
937 "default_reasoning_effort": "medium",
938 "supports_attachments": false
939 },
940 {
941 "id": "pixtral-large-2502",
942 "name": "Pixtral Large 25.02",
943 "cost_per_1m_in": 1.789,
944 "cost_per_1m_out": 5.366,
945 "cost_per_1m_in_cached": 0,
946 "cost_per_1m_out_cached": 0,
947 "context_window": 128000,
948 "default_max_tokens": 12800,
949 "can_reason": true,
950 "reasoning_levels": [
951 "low",
952 "medium",
953 "high"
954 ],
955 "default_reasoning_effort": "medium",
956 "supports_attachments": true
957 },
958 {
959 "id": "qwen3-235b-a22b-instruct-2507",
960 "name": "Qwen3 235B A22B Instruct 2507",
961 "cost_per_1m_in": 0.062118,
962 "cost_per_1m_out": 0.408204,
963 "cost_per_1m_in_cached": 0,
964 "cost_per_1m_out_cached": 0,
965 "context_window": 131000,
966 "default_max_tokens": 13100,
967 "can_reason": true,
968 "reasoning_levels": [
969 "low",
970 "medium",
971 "high"
972 ],
973 "default_reasoning_effort": "medium",
974 "supports_attachments": false
975 },
976 {
977 "id": "qwen3-coder-30b-a3b-instruct",
978 "name": "Qwen3 Coder 30B A3B Instruct",
979 "cost_per_1m_in": 0.053244,
980 "cost_per_1m_out": 0.22185,
981 "cost_per_1m_in_cached": 0,
982 "cost_per_1m_out_cached": 0,
983 "context_window": 262000,
984 "default_max_tokens": 26200,
985 "can_reason": true,
986 "reasoning_levels": [
987 "low",
988 "medium",
989 "high"
990 ],
991 "default_reasoning_effort": "medium",
992 "supports_attachments": false
993 },
994 {
995 "id": "qwen3-32b",
996 "name": "Qwen3 32B",
997 "cost_per_1m_in": 0.089,
998 "cost_per_1m_out": 0.268,
999 "cost_per_1m_in_cached": 0,
1000 "cost_per_1m_out_cached": 0,
1001 "context_window": 40000,
1002 "default_max_tokens": 4000,
1003 "can_reason": true,
1004 "reasoning_levels": [
1005 "low",
1006 "medium",
1007 "high"
1008 ],
1009 "default_reasoning_effort": "medium",
1010 "supports_attachments": false
1011 },
1012 {
1013 "id": "nova-lite-v1",
1014 "name": "Nova Lite 1.0",
1015 "cost_per_1m_in": 0.062,
1016 "cost_per_1m_out": 0.247,
1017 "cost_per_1m_in_cached": 0,
1018 "cost_per_1m_out_cached": 0,
1019 "context_window": 300000,
1020 "default_max_tokens": 30000,
1021 "can_reason": true,
1022 "reasoning_levels": [
1023 "low",
1024 "medium",
1025 "high"
1026 ],
1027 "default_reasoning_effort": "medium",
1028 "supports_attachments": true
1029 },
1030 {
1031 "id": "claude-sonnet-4",
1032 "name": "Claude Sonnet 4",
1033 "cost_per_1m_in": 2.601,
1034 "cost_per_1m_out": 13.01,
1035 "cost_per_1m_in_cached": 0,
1036 "cost_per_1m_out_cached": 0,
1037 "context_window": 200000,
1038 "default_max_tokens": 20000,
1039 "can_reason": true,
1040 "reasoning_levels": [
1041 "low",
1042 "medium",
1043 "high"
1044 ],
1045 "default_reasoning_effort": "medium",
1046 "supports_attachments": true
1047 },
1048 {
1049 "id": "gpt-4.1-mini",
1050 "name": "GPT 4.1 mini",
1051 "cost_per_1m_in": 0.39,
1052 "cost_per_1m_out": 1.53,
1053 "cost_per_1m_in_cached": 0,
1054 "cost_per_1m_out_cached": 0,
1055 "context_window": 1047576,
1056 "default_max_tokens": 104757,
1057 "can_reason": true,
1058 "reasoning_levels": [
1059 "low",
1060 "medium",
1061 "high"
1062 ],
1063 "default_reasoning_effort": "medium",
1064 "supports_attachments": true
1065 },
1066 {
1067 "id": "gpt-4.1-nano",
1068 "name": "GPT 4.1 nano",
1069 "cost_per_1m_in": 0.1,
1070 "cost_per_1m_out": 0.39,
1071 "cost_per_1m_in_cached": 0,
1072 "cost_per_1m_out_cached": 0,
1073 "context_window": 1047576,
1074 "default_max_tokens": 104757,
1075 "can_reason": true,
1076 "reasoning_levels": [
1077 "low",
1078 "medium",
1079 "high"
1080 ],
1081 "default_reasoning_effort": "medium",
1082 "supports_attachments": true
1083 },
1084 {
1085 "id": "nova-micro-v1",
1086 "name": "Nova Micro 1.0",
1087 "cost_per_1m_in": 0.036,
1088 "cost_per_1m_out": 0.143,
1089 "cost_per_1m_in_cached": 0,
1090 "cost_per_1m_out_cached": 0,
1091 "context_window": 128000,
1092 "default_max_tokens": 12800,
1093 "can_reason": true,
1094 "reasoning_levels": [
1095 "low",
1096 "medium",
1097 "high"
1098 ],
1099 "default_reasoning_effort": "medium",
1100 "supports_attachments": true
1101 },
1102 {
1103 "id": "gpt-4.1",
1104 "name": "GPT 4.1",
1105 "cost_per_1m_in": 1.968,
1106 "cost_per_1m_out": 7.872,
1107 "cost_per_1m_in_cached": 0,
1108 "cost_per_1m_out_cached": 0,
1109 "context_window": 1047576,
1110 "default_max_tokens": 104757,
1111 "can_reason": true,
1112 "reasoning_levels": [
1113 "low",
1114 "medium",
1115 "high"
1116 ],
1117 "default_reasoning_effort": "medium",
1118 "supports_attachments": true
1119 },
1120 {
1121 "id": "nova-pro-v1",
1122 "name": "Nova Pro 1.0",
1123 "cost_per_1m_in": 0.824,
1124 "cost_per_1m_out": 3.295,
1125 "cost_per_1m_in_cached": 0,
1126 "cost_per_1m_out_cached": 0,
1127 "context_window": 300000,
1128 "default_max_tokens": 30000,
1129 "can_reason": true,
1130 "reasoning_levels": [
1131 "low",
1132 "medium",
1133 "high"
1134 ],
1135 "default_reasoning_effort": "medium",
1136 "supports_attachments": true
1137 },
1138 {
1139 "id": "llama-3.1-nemotron-ultra-253b-v1",
1140 "name": "Llama 3.1 Nemotron Ultra 253B v1",
1141 "cost_per_1m_in": 0.537,
1142 "cost_per_1m_out": 1.61,
1143 "cost_per_1m_in_cached": 0,
1144 "cost_per_1m_out_cached": 0,
1145 "context_window": 128000,
1146 "default_max_tokens": 12800,
1147 "can_reason": true,
1148 "reasoning_levels": [
1149 "low",
1150 "medium",
1151 "high"
1152 ],
1153 "default_reasoning_effort": "medium",
1154 "supports_attachments": false
1155 },
1156 {
1157 "id": "llama-4-maverick",
1158 "name": "Llama 4 Maverick",
1159 "cost_per_1m_in": 0.124236,
1160 "cost_per_1m_out": 0.602832,
1161 "cost_per_1m_in_cached": 0,
1162 "cost_per_1m_out_cached": 0,
1163 "context_window": 1050000,
1164 "default_max_tokens": 105000,
1165 "can_reason": false,
1166 "supports_attachments": false
1167 },
1168 {
1169 "id": "deepseek-v3-0324",
1170 "name": "DeepSeek V3 0324",
1171 "cost_per_1m_in": 0.26622,
1172 "cost_per_1m_out": 0.8874,
1173 "cost_per_1m_in_cached": 0,
1174 "cost_per_1m_out_cached": 0,
1175 "context_window": 163840,
1176 "default_max_tokens": 16384,
1177 "can_reason": true,
1178 "reasoning_levels": [
1179 "low",
1180 "medium",
1181 "high"
1182 ],
1183 "default_reasoning_effort": "medium",
1184 "supports_attachments": false
1185 },
1186 {
1187 "id": "mistral-small-2503",
1188 "name": "Mistral Small 2503",
1189 "cost_per_1m_in": 0.1,
1190 "cost_per_1m_out": 0.3,
1191 "cost_per_1m_in_cached": 0,
1192 "cost_per_1m_out_cached": 0,
1193 "context_window": 128000,
1194 "default_max_tokens": 12800,
1195 "can_reason": false,
1196 "supports_attachments": true
1197 },
1198 {
1199 "id": "mistral-small-2506",
1200 "name": "Mistral Small 2506",
1201 "cost_per_1m_in": 0.1,
1202 "cost_per_1m_out": 0.3,
1203 "cost_per_1m_in_cached": 0,
1204 "cost_per_1m_out_cached": 0,
1205 "context_window": 131072,
1206 "default_max_tokens": 13107,
1207 "can_reason": false,
1208 "supports_attachments": true
1209 },
1210 {
1211 "id": "gemini-2.0-flash-001",
1212 "name": "Gemini 2.0 Flash",
1213 "cost_per_1m_in": 0.13416,
1214 "cost_per_1m_out": 0.53664,
1215 "cost_per_1m_in_cached": 0,
1216 "cost_per_1m_out_cached": 0,
1217 "context_window": 1048576,
1218 "default_max_tokens": 104857,
1219 "can_reason": false,
1220 "supports_attachments": true
1221 },
1222 {
1223 "id": "gemini-2.0-flash-lite-001",
1224 "name": "Gemini 2.0 Flash Lite",
1225 "cost_per_1m_in": 0.06708,
1226 "cost_per_1m_out": 0.26832,
1227 "cost_per_1m_in_cached": 0,
1228 "cost_per_1m_out_cached": 0,
1229 "context_window": 1048576,
1230 "default_max_tokens": 104857,
1231 "can_reason": false,
1232 "supports_attachments": true
1233 },
1234 {
1235 "id": "gemini-2.5-flash",
1236 "name": "Gemini 2.5 Flash",
1237 "cost_per_1m_in": 0.26832,
1238 "cost_per_1m_out": 2.236,
1239 "cost_per_1m_in_cached": 0,
1240 "cost_per_1m_out_cached": 0,
1241 "context_window": 1048576,
1242 "default_max_tokens": 104857,
1243 "can_reason": true,
1244 "reasoning_levels": [
1245 "low",
1246 "medium",
1247 "high"
1248 ],
1249 "default_reasoning_effort": "medium",
1250 "supports_attachments": true
1251 },
1252 {
1253 "id": "gemini-2.5-pro",
1254 "name": "Gemini 2.5 Pro",
1255 "cost_per_1m_in": 1.3416,
1256 "cost_per_1m_out": 8.944,
1257 "cost_per_1m_in_cached": 0,
1258 "cost_per_1m_out_cached": 0,
1259 "context_window": 1048576,
1260 "default_max_tokens": 104857,
1261 "can_reason": true,
1262 "reasoning_levels": [
1263 "low",
1264 "medium",
1265 "high"
1266 ],
1267 "default_reasoning_effort": "medium",
1268 "supports_attachments": true
1269 },
1270 {
1271 "id": "gemma-3-27b-it",
1272 "name": "Gemma 3 27b it",
1273 "cost_per_1m_in": 0.089,
1274 "cost_per_1m_out": 0.268,
1275 "cost_per_1m_in_cached": 0,
1276 "cost_per_1m_out_cached": 0,
1277 "context_window": 131000,
1278 "default_max_tokens": 13100,
1279 "can_reason": true,
1280 "reasoning_levels": [
1281 "low",
1282 "medium",
1283 "high"
1284 ],
1285 "default_reasoning_effort": "medium",
1286 "supports_attachments": true
1287 },
1288 {
1289 "id": "deepseek-r1-0528",
1290 "name": "DeepSeek R1 0528",
1291 "cost_per_1m_in": 0.585084,
1292 "cost_per_1m_out": 2.30724,
1293 "cost_per_1m_in_cached": 0,
1294 "cost_per_1m_out_cached": 0,
1295 "context_window": 164000,
1296 "default_max_tokens": 16400,
1297 "can_reason": true,
1298 "reasoning_levels": [
1299 "low",
1300 "medium",
1301 "high"
1302 ],
1303 "default_reasoning_effort": "medium",
1304 "supports_attachments": false
1305 },
1306 {
1307 "id": "codestral-2508",
1308 "name": "Codestral 25.08",
1309 "cost_per_1m_in": 0.3,
1310 "cost_per_1m_out": 0.9,
1311 "cost_per_1m_in_cached": 0,
1312 "cost_per_1m_out_cached": 0,
1313 "context_window": 256000,
1314 "default_max_tokens": 25600,
1315 "can_reason": false,
1316 "supports_attachments": false
1317 },
1318 {
1319 "id": "llama-3.3-70b-instruct",
1320 "name": "Llama 3.3 70B Instruct",
1321 "cost_per_1m_in": 0.08874,
1322 "cost_per_1m_out": 0.274994,
1323 "cost_per_1m_in_cached": 0,
1324 "cost_per_1m_out_cached": 0,
1325 "context_window": 131000,
1326 "default_max_tokens": 13100,
1327 "can_reason": true,
1328 "reasoning_levels": [
1329 "low",
1330 "medium",
1331 "high"
1332 ],
1333 "default_reasoning_effort": "medium",
1334 "supports_attachments": false
1335 },
1336 {
1337 "id": "gpt-4o",
1338 "name": "GPT 4o",
1339 "cost_per_1m_in": 2.38664,
1340 "cost_per_1m_out": 9.5466,
1341 "cost_per_1m_in_cached": 0,
1342 "cost_per_1m_out_cached": 0,
1343 "context_window": 128000,
1344 "default_max_tokens": 12800,
1345 "can_reason": true,
1346 "reasoning_levels": [
1347 "low",
1348 "medium",
1349 "high"
1350 ],
1351 "default_reasoning_effort": "medium",
1352 "supports_attachments": true
1353 },
1354 {
1355 "id": "gpt-5-mini",
1356 "name": "GPT 5 mini",
1357 "cost_per_1m_in": 0.25,
1358 "cost_per_1m_out": 1.968,
1359 "cost_per_1m_in_cached": 0,
1360 "cost_per_1m_out_cached": 0,
1361 "context_window": 400000,
1362 "default_max_tokens": 40000,
1363 "can_reason": true,
1364 "reasoning_levels": [
1365 "low",
1366 "medium",
1367 "high"
1368 ],
1369 "default_reasoning_effort": "medium",
1370 "supports_attachments": true
1371 },
1372 {
1373 "id": "gpt-5-nano",
1374 "name": "GPT 5 nano",
1375 "cost_per_1m_in": 0.054,
1376 "cost_per_1m_out": 0.394,
1377 "cost_per_1m_in_cached": 0,
1378 "cost_per_1m_out_cached": 0,
1379 "context_window": 400000,
1380 "default_max_tokens": 40000,
1381 "can_reason": true,
1382 "reasoning_levels": [
1383 "low",
1384 "medium",
1385 "high"
1386 ],
1387 "default_reasoning_effort": "medium",
1388 "supports_attachments": true
1389 },
1390 {
1391 "id": "mistral-large-2411",
1392 "name": "Mistral Large 2411",
1393 "cost_per_1m_in": 1.8,
1394 "cost_per_1m_out": 5.4,
1395 "cost_per_1m_in_cached": 0,
1396 "cost_per_1m_out_cached": 0,
1397 "context_window": 131072,
1398 "default_max_tokens": 13107,
1399 "can_reason": true,
1400 "reasoning_levels": [
1401 "low",
1402 "medium",
1403 "high"
1404 ],
1405 "default_reasoning_effort": "medium",
1406 "supports_attachments": false
1407 },
1408 {
1409 "id": "hermes-4-405b",
1410 "name": "Hermes 4 405B",
1411 "cost_per_1m_in": 0.894,
1412 "cost_per_1m_out": 2.683,
1413 "cost_per_1m_in_cached": 0,
1414 "cost_per_1m_out_cached": 0,
1415 "context_window": 128000,
1416 "default_max_tokens": 12800,
1417 "can_reason": false,
1418 "supports_attachments": false
1419 },
1420 {
1421 "id": "mistral-medium-2508",
1422 "name": "Mistral Medium 2508",
1423 "cost_per_1m_in": 0.4,
1424 "cost_per_1m_out": 2,
1425 "cost_per_1m_in_cached": 0,
1426 "cost_per_1m_out_cached": 0,
1427 "context_window": 128000,
1428 "default_max_tokens": 12800,
1429 "can_reason": true,
1430 "reasoning_levels": [
1431 "low",
1432 "medium",
1433 "high"
1434 ],
1435 "default_reasoning_effort": "medium",
1436 "supports_attachments": true
1437 },
1438 {
1439 "id": "devstral-medium-2507",
1440 "name": "Devstral Medium 2507",
1441 "cost_per_1m_in": 0.4,
1442 "cost_per_1m_out": 2,
1443 "cost_per_1m_in_cached": 0,
1444 "cost_per_1m_out_cached": 0,
1445 "context_window": 131072,
1446 "default_max_tokens": 13107,
1447 "can_reason": false,
1448 "supports_attachments": false
1449 },
1450 {
1451 "id": "mistral-nemo-instruct-2407",
      "name": "Mistral Nemo Instruct 2407",
1453 "cost_per_1m_in": 0.13,
1454 "cost_per_1m_out": 0.13,
1455 "cost_per_1m_in_cached": 0,
1456 "cost_per_1m_out_cached": 0,
1457 "context_window": 131072,
1458 "default_max_tokens": 13107,
1459 "can_reason": false,
1460 "supports_attachments": false
1461 },
1462 {
1463 "id": "devstral-small-2507",
1464 "name": "Devstral Small 2507",
1465 "cost_per_1m_in": 0.1,
1466 "cost_per_1m_out": 0.3,
1467 "cost_per_1m_in_cached": 0,
1468 "cost_per_1m_out_cached": 0,
1469 "context_window": 131072,
1470 "default_max_tokens": 13107,
1471 "can_reason": false,
1472 "supports_attachments": false
1473 },
1474 {
1475 "id": "llama-3.1-405b-instruct",
1476 "name": "Llama 3.1 405B Instruct",
1477 "cost_per_1m_in": 1.75,
1478 "cost_per_1m_out": 1.75,
1479 "cost_per_1m_in_cached": 0,
1480 "cost_per_1m_out_cached": 0,
1481 "context_window": 128000,
1482 "default_max_tokens": 12800,
1483 "can_reason": true,
1484 "reasoning_levels": [
1485 "low",
1486 "medium",
1487 "high"
1488 ],
1489 "default_reasoning_effort": "medium",
1490 "supports_attachments": false
1491 },
1492 {
1493 "id": "gpt-4o-mini",
1494 "name": "GPT 4o mini",
1495 "cost_per_1m_in": 0.1432,
1496 "cost_per_1m_out": 0.5728,
1497 "cost_per_1m_in_cached": 0,
1498 "cost_per_1m_out_cached": 0,
1499 "context_window": 128000,
1500 "default_max_tokens": 12800,
1501 "can_reason": true,
1502 "reasoning_levels": [
1503 "low",
1504 "medium",
1505 "high"
1506 ],
1507 "default_reasoning_effort": "medium",
1508 "supports_attachments": true
1509 },
1510 {
1511 "id": "llama-3.1-8b-instruct",
1512 "name": "Llama 3.1 8B Instruct",
1513 "cost_per_1m_in": 0.018,
1514 "cost_per_1m_out": 0.054,
1515 "cost_per_1m_in_cached": 0,
1516 "cost_per_1m_out_cached": 0,
1517 "context_window": 128000,
1518 "default_max_tokens": 12800,
1519 "can_reason": true,
1520 "reasoning_levels": [
1521 "low",
1522 "medium",
1523 "high"
1524 ],
1525 "default_reasoning_effort": "medium",
1526 "supports_attachments": false
1527 }
1528 ]
1529}