   1{
   2  "name": "Cortecs",
   3  "id": "cortecs",
   4  "api_key": "$CORTECS_API_KEY",
   5  "api_endpoint": "https://api.cortecs.ai/v1",
   6  "type": "openai",
   7  "default_large_model_id": "qwen3-coder-30b-a3b-instruct",
   8  "default_small_model_id": "glm-4.7-flash",
   9  "models": [
  10    {
  11      "id": "nvidia-nemotron-3-nano-omni",
  12      "name": "Nemotron 3 Nano Omni",
  13      "cost_per_1m_in": 0.0532447,
  14      "cost_per_1m_out": 0.212976,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 300000,
  18      "default_max_tokens": 30000,
  19      "can_reason": true,
  20      "reasoning_levels": [
  21        "low",
  22        "medium",
  23        "high"
  24      ],
  25      "default_reasoning_effort": "medium",
  26      "supports_attachments": false
  27    },
  28    {
  29      "id": "gpt-5.4",
  30      "name": "GPT 5.4",
  31      "cost_per_1m_in": 2.601,
  32      "cost_per_1m_out": 13.872,
  33      "cost_per_1m_in_cached": 0,
  34      "cost_per_1m_out_cached": 0,
  35      "context_window": 1050000,
  36      "default_max_tokens": 105000,
  37      "can_reason": true,
  38      "reasoning_levels": [
  39        "low",
  40        "medium",
  41        "high"
  42      ],
  43      "default_reasoning_effort": "medium",
  44      "supports_attachments": true
  45    },
  46    {
  47      "id": "kimi-k2.6",
  48      "name": "Kimi K2.6",
  49      "cost_per_1m_in": 0.6936,
  50      "cost_per_1m_out": 3.0345,
  51      "cost_per_1m_in_cached": 0,
  52      "cost_per_1m_out_cached": 0,
  53      "context_window": 256000,
  54      "default_max_tokens": 25600,
  55      "can_reason": true,
  56      "reasoning_levels": [
  57        "low",
  58        "medium",
  59        "high"
  60      ],
  61      "default_reasoning_effort": "medium",
  62      "supports_attachments": true
  63    },
  64    {
  65      "id": "claude-opus4-7",
  66      "name": "Claude Opus 4.7",
  67      "cost_per_1m_in": 4.7685,
  68      "cost_per_1m_out": 23.8425,
  69      "cost_per_1m_in_cached": 0,
  70      "cost_per_1m_out_cached": 0,
  71      "context_window": 1000000,
  72      "default_max_tokens": 100000,
  73      "can_reason": true,
  74      "reasoning_levels": [
  75        "low",
  76        "medium",
  77        "high"
  78      ],
  79      "default_reasoning_effort": "medium",
  80      "supports_attachments": true
  81    },
  82    {
  83      "id": "minimax-m2.7",
  84      "name": "MiniMax M2.7",
  85      "cost_per_1m_in": 0.26622,
  86      "cost_per_1m_out": 1.06488,
  87      "cost_per_1m_in_cached": 0,
  88      "cost_per_1m_out_cached": 0,
  89      "context_window": 196608,
  90      "default_max_tokens": 19660,
  91      "can_reason": true,
  92      "reasoning_levels": [
  93        "low",
  94        "medium",
  95        "high"
  96      ],
  97      "default_reasoning_effort": "medium",
  98      "supports_attachments": false
  99    },
 100    {
 101      "id": "glm-5.1",
 102      "name": "GLM 5.1",
 103      "cost_per_1m_in": 1.24236,
 104      "cost_per_1m_out": 3.90336,
 105      "cost_per_1m_in_cached": 0,
 106      "cost_per_1m_out_cached": 0,
 107      "context_window": 202752,
 108      "default_max_tokens": 20275,
 109      "can_reason": true,
 110      "reasoning_levels": [
 111        "low",
 112        "medium",
 113        "high"
 114      ],
 115      "default_reasoning_effort": "medium",
 116      "supports_attachments": false
 117    },
 118    {
 119      "id": "qwen3.5-122b-a10b",
 120      "name": "Qwen3.5 122B A10B",
 121      "cost_per_1m_in": 0.4437,
 122      "cost_per_1m_out": 3.1059,
 123      "cost_per_1m_in_cached": 0,
 124      "cost_per_1m_out_cached": 0,
 125      "context_window": 262144,
 126      "default_max_tokens": 26214,
 127      "can_reason": true,
 128      "reasoning_levels": [
 129        "low",
 130        "medium",
 131        "high"
 132      ],
 133      "default_reasoning_effort": "medium",
 134      "supports_attachments": false
 135    },
 136    {
 137      "id": "qwen3.5-9b",
 138      "name": "Qwen3.5 9B",
 139      "cost_per_1m_in": 0.1,
 140      "cost_per_1m_out": 0.15,
 141      "cost_per_1m_in_cached": 0,
 142      "cost_per_1m_out_cached": 0,
 143      "context_window": 262000,
 144      "default_max_tokens": 26200,
 145      "can_reason": true,
 146      "reasoning_levels": [
 147        "low",
 148        "medium",
 149        "high"
 150      ],
 151      "default_reasoning_effort": "medium",
 152      "supports_attachments": false
 153    },
 154    {
 155      "id": "nemotron-3-super-120b-a12b",
 156      "name": "Nemotron 3 Super 120B A12B",
 157      "cost_per_1m_in": 0.15606,
 158      "cost_per_1m_out": 0.67626,
 159      "cost_per_1m_in_cached": 0,
 160      "cost_per_1m_out_cached": 0,
 161      "context_window": 262000,
 162      "default_max_tokens": 26214,
 163      "can_reason": true,
 164      "reasoning_levels": [
 165        "low",
 166        "medium",
 167        "high"
 168      ],
 169      "default_reasoning_effort": "medium",
 170      "supports_attachments": false
 171    },
 172    {
 173      "id": "qwen3-coder-next",
 174      "name": "Qwen3 Coder Next",
 175      "cost_per_1m_in": 0.15,
 176      "cost_per_1m_out": 0.8,
 177      "cost_per_1m_in_cached": 0,
 178      "cost_per_1m_out_cached": 0,
 179      "context_window": 256000,
 180      "default_max_tokens": 25600,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "low",
 184        "medium",
 185        "high"
 186      ],
 187      "default_reasoning_effort": "medium",
 188      "supports_attachments": false
 189    },
 190    {
 191      "id": "glm-5",
 192      "name": "GLM 5",
 193      "cost_per_1m_in": 0.8874,
 194      "cost_per_1m_out": 2.83968,
 195      "cost_per_1m_in_cached": 0,
 196      "cost_per_1m_out_cached": 0,
 197      "context_window": 202752,
 198      "default_max_tokens": 20275,
 199      "can_reason": true,
 200      "reasoning_levels": [
 201        "low",
 202        "medium",
 203        "high"
 204      ],
 205      "default_reasoning_effort": "medium",
 206      "supports_attachments": false
 207    },
 208    {
 209      "id": "glm-4.6",
 210      "name": "GLM 4.6",
 211      "cost_per_1m_in": 0.35496,
 212      "cost_per_1m_out": 1.55295,
 213      "cost_per_1m_in_cached": 0,
 214      "cost_per_1m_out_cached": 0,
 215      "context_window": 203000,
 216      "default_max_tokens": 20300,
 217      "can_reason": true,
 218      "reasoning_levels": [
 219        "low",
 220        "medium",
 221        "high"
 222      ],
 223      "default_reasoning_effort": "medium",
 224      "supports_attachments": false
 225    },
 226    {
 227      "id": "deepseek-chat-v3.1",
 228      "name": "DeepSeek Chat V3.1",
 229      "cost_per_1m_in": 0.17748,
 230      "cost_per_1m_out": 0.70992,
 231      "cost_per_1m_in_cached": 0,
 232      "cost_per_1m_out_cached": 0,
 233      "context_window": 164000,
 234      "default_max_tokens": 16400,
 235      "can_reason": true,
 236      "reasoning_levels": [
 237        "low",
 238        "medium",
 239        "high"
 240      ],
 241      "default_reasoning_effort": "medium",
 242      "supports_attachments": false
 243    },
 244    {
 245      "id": "qwen-2.5-72b-instruct",
 246      "name": "Qwen2.5 72B Instruct",
 247      "cost_per_1m_in": 0.062118,
 248      "cost_per_1m_out": 0.230724,
 249      "cost_per_1m_in_cached": 0,
 250      "cost_per_1m_out_cached": 0,
 251      "context_window": 33000,
 252      "default_max_tokens": 3300,
 253      "can_reason": false,
 254      "supports_attachments": false
 255    },
 256    {
 257      "id": "qwen3.5-397b-a17b",
  258      "name": "Qwen3.5 397B A17B",
 259      "cost_per_1m_in": 0.53244,
 260      "cost_per_1m_out": 3.19464,
 261      "cost_per_1m_in_cached": 0,
 262      "cost_per_1m_out_cached": 0,
 263      "context_window": 262000,
 264      "default_max_tokens": 25000,
 265      "can_reason": true,
 266      "reasoning_levels": [
 267        "low",
 268        "medium",
 269        "high"
 270      ],
 271      "default_reasoning_effort": "medium",
 272      "supports_attachments": false
 273    },
 274    {
 275      "id": "deepseek-v3.2",
 276      "name": "DeepSeek V3.2",
 277      "cost_per_1m_in": 0.26622,
 278      "cost_per_1m_out": 0.4437,
 279      "cost_per_1m_in_cached": 0,
 280      "cost_per_1m_out_cached": 0,
 281      "context_window": 163840,
 282      "default_max_tokens": 16384,
 283      "can_reason": true,
 284      "reasoning_levels": [
 285        "low",
 286        "medium",
 287        "high"
 288      ],
 289      "default_reasoning_effort": "medium",
 290      "supports_attachments": false
 291    },
 292    {
 293      "id": "mistral-small-2603",
 294      "name": "Mistral Small 4 2603",
 295      "cost_per_1m_in": 0.1275,
 296      "cost_per_1m_out": 0.51,
 297      "cost_per_1m_in_cached": 0,
 298      "cost_per_1m_out_cached": 0,
 299      "context_window": 256000,
 300      "default_max_tokens": 25600,
 301      "can_reason": true,
 302      "reasoning_levels": [
 303        "low",
 304        "medium",
 305        "high"
 306      ],
 307      "default_reasoning_effort": "medium",
 308      "supports_attachments": true
 309    },
 310    {
 311      "id": "minimax-m2.5",
 312      "name": "MiniMax M2.5",
 313      "cost_per_1m_in": 0.26622,
 314      "cost_per_1m_out": 0.97614,
 315      "cost_per_1m_in_cached": 0,
 316      "cost_per_1m_out_cached": 0,
 317      "context_window": 196608,
 318      "default_max_tokens": 19660,
 319      "can_reason": true,
 320      "reasoning_levels": [
 321        "low",
 322        "medium",
 323        "high"
 324      ],
 325      "default_reasoning_effort": "medium",
 326      "supports_attachments": false
 327    },
 328    {
 329      "id": "claude-4-6-sonnet",
 330      "name": "Claude Sonnet 4.6",
 331      "cost_per_1m_in": 2.8691,
 332      "cost_per_1m_out": 14.3095,
 333      "cost_per_1m_in_cached": 0,
 334      "cost_per_1m_out_cached": 0,
 335      "context_window": 1000000,
 336      "default_max_tokens": 100000,
 337      "can_reason": true,
 338      "reasoning_levels": [
 339        "low",
 340        "medium",
 341        "high"
 342      ],
 343      "default_reasoning_effort": "medium",
 344      "supports_attachments": true
 345    },
 346    {
 347      "id": "glm-4.7-flash",
 348      "name": "GLM 4.7 Flash",
 349      "cost_per_1m_in": 0.0716,
 350      "cost_per_1m_out": 0.4293,
 351      "cost_per_1m_in_cached": 0,
 352      "cost_per_1m_out_cached": 0,
 353      "context_window": 203000,
 354      "default_max_tokens": 20300,
 355      "can_reason": false,
 356      "supports_attachments": false
 357    },
 358    {
 359      "id": "kimi-k2.5",
 360      "name": "Kimi K2.5",
 361      "cost_per_1m_in": 0.4437,
 362      "cost_per_1m_out": 2.12976,
 363      "cost_per_1m_in_cached": 0,
 364      "cost_per_1m_out_cached": 0,
 365      "context_window": 256000,
 366      "default_max_tokens": 25600,
 367      "can_reason": true,
 368      "reasoning_levels": [
 369        "low",
 370        "medium",
 371        "high"
 372      ],
 373      "default_reasoning_effort": "medium",
 374      "supports_attachments": true
 375    },
 376    {
 377      "id": "claude-opus4-6",
 378      "name": "Claude Opus 4.6",
 379      "cost_per_1m_in": 4.7685,
 380      "cost_per_1m_out": 23.8425,
 381      "cost_per_1m_in_cached": 0,
 382      "cost_per_1m_out_cached": 0,
 383      "context_window": 1000000,
 384      "default_max_tokens": 100000,
 385      "can_reason": true,
 386      "reasoning_levels": [
 387        "low",
 388        "medium",
 389        "high"
 390      ],
 391      "default_reasoning_effort": "medium",
 392      "supports_attachments": true
 393    },
 394    {
 395      "id": "minimax-m2",
 396      "name": "MiniMax M2",
 397      "cost_per_1m_in": 0.22185,
 398      "cost_per_1m_out": 0.8874,
 399      "cost_per_1m_in_cached": 0,
 400      "cost_per_1m_out_cached": 0,
 401      "context_window": 196608,
 402      "default_max_tokens": 19660,
 403      "can_reason": true,
 404      "reasoning_levels": [
 405        "low",
 406        "medium",
 407        "high"
 408      ],
 409      "default_reasoning_effort": "medium",
 410      "supports_attachments": false
 411    },
 412    {
 413      "id": "glm-4.7",
 414      "name": "GLM 4.7",
 415      "cost_per_1m_in": 0.53244,
 416      "cost_per_1m_out": 1.95228,
 417      "cost_per_1m_in_cached": 0,
 418      "cost_per_1m_out_cached": 0,
 419      "context_window": 202752,
 420      "default_max_tokens": 20275,
 421      "can_reason": true,
 422      "reasoning_levels": [
 423        "low",
 424        "medium",
 425        "high"
 426      ],
 427      "default_reasoning_effort": "medium",
 428      "supports_attachments": false
 429    },
 430    {
 431      "id": "minimax-m2.1",
 432      "name": "MiniMax M2.1",
 433      "cost_per_1m_in": 0.322,
 434      "cost_per_1m_out": 1.2879,
 435      "cost_per_1m_in_cached": 0,
 436      "cost_per_1m_out_cached": 0,
 437      "context_window": 196000,
 438      "default_max_tokens": 19600,
 439      "can_reason": true,
 440      "reasoning_levels": [
 441        "low",
 442        "medium",
 443        "high"
 444      ],
 445      "default_reasoning_effort": "medium",
 446      "supports_attachments": false
 447    },
 448    {
 449      "id": "qwen3-vl-235b-a22b",
 450      "name": "Qwen3 VL 235B A22B",
 451      "cost_per_1m_in": 0.186354,
 452      "cost_per_1m_out": 1.68606,
 453      "cost_per_1m_in_cached": 0,
 454      "cost_per_1m_out_cached": 0,
 455      "context_window": 131000,
 456      "default_max_tokens": 13100,
 457      "can_reason": true,
 458      "reasoning_levels": [
 459        "low",
 460        "medium",
 461        "high"
 462      ],
 463      "default_reasoning_effort": "medium",
 464      "supports_attachments": true
 465    },
 466    {
 467      "id": "mistral-small-creative",
 468      "name": "Mistral Small Creative",
 469      "cost_per_1m_in": 0.1,
 470      "cost_per_1m_out": 0.3,
 471      "cost_per_1m_in_cached": 0,
 472      "cost_per_1m_out_cached": 0,
 473      "context_window": 32000,
 474      "default_max_tokens": 3200,
 475      "can_reason": false,
 476      "supports_attachments": false
 477    },
 478    {
 479      "id": "nvidia-nemotron-3-nano-30b-a3b",
 480      "name": "Nemotron 3 Nano 30B A3B",
 481      "cost_per_1m_in": 0.0537,
 482      "cost_per_1m_out": 0.215,
 483      "cost_per_1m_in_cached": 0,
 484      "cost_per_1m_out_cached": 0,
 485      "context_window": 128000,
 486      "default_max_tokens": 12800,
 487      "can_reason": true,
 488      "reasoning_levels": [
 489        "low",
 490        "medium",
 491        "high"
 492      ],
 493      "default_reasoning_effort": "medium",
 494      "supports_attachments": false
 495    },
 496    {
 497      "id": "claude-opus4-5",
 498      "name": "Claude Opus 4.5",
 499      "cost_per_1m_in": 4.7695,
 500      "cost_per_1m_out": 23.8485,
 501      "cost_per_1m_in_cached": 0,
 502      "cost_per_1m_out_cached": 0,
 503      "context_window": 200000,
 504      "default_max_tokens": 20000,
 505      "can_reason": true,
 506      "reasoning_levels": [
 507        "low",
 508        "medium",
 509        "high"
 510      ],
 511      "default_reasoning_effort": "medium",
 512      "supports_attachments": true
 513    },
 514    {
 515      "id": "qwen3-next-80b-a3b-thinking",
 516      "name": "Qwen3 Next 80B A3B Thinking",
 517      "cost_per_1m_in": 0.13311,
 518      "cost_per_1m_out": 1.06488,
 519      "cost_per_1m_in_cached": 0,
 520      "cost_per_1m_out_cached": 0,
 521      "context_window": 262000,
  522      "default_max_tokens": 26200,
 523      "can_reason": true,
 524      "reasoning_levels": [
 525        "low",
 526        "medium",
 527        "high"
 528      ],
 529      "default_reasoning_effort": "medium",
 530      "supports_attachments": false
 531    },
 532    {
 533      "id": "holo2-30b-a3b",
 534      "name": "Holo2 30B A3B",
 535      "cost_per_1m_in": 0.3,
 536      "cost_per_1m_out": 0.7,
 537      "cost_per_1m_in_cached": 0,
 538      "cost_per_1m_out_cached": 0,
 539      "context_window": 22000,
 540      "default_max_tokens": 2200,
 541      "can_reason": true,
 542      "reasoning_levels": [
 543        "low",
 544        "medium",
 545        "high"
 546      ],
 547      "default_reasoning_effort": "medium",
 548      "supports_attachments": true
 549    },
 550    {
 551      "id": "devstral-2512",
 552      "name": "Devstral 2 2512",
 553      "cost_per_1m_in": 0.4,
 554      "cost_per_1m_out": 2,
 555      "cost_per_1m_in_cached": 0,
 556      "cost_per_1m_out_cached": 0,
 557      "context_window": 262000,
 558      "default_max_tokens": 26200,
 559      "can_reason": false,
 560      "supports_attachments": false
 561    },
 562    {
 563      "id": "nova-2-lite",
 564      "name": "Nova 2 Lite",
 565      "cost_per_1m_in": 0.335,
 566      "cost_per_1m_out": 2.822,
 567      "cost_per_1m_in_cached": 0,
 568      "cost_per_1m_out_cached": 0,
 569      "context_window": 1000000,
 570      "default_max_tokens": 100000,
 571      "can_reason": true,
 572      "reasoning_levels": [
 573        "low",
 574        "medium",
 575        "high"
 576      ],
 577      "default_reasoning_effort": "medium",
 578      "supports_attachments": true
 579    },
 580    {
 581      "id": "gpt-oss-safeguard-120b",
 582      "name": "GPT OSS Safeguard 120B",
 583      "cost_per_1m_in": 0.161,
 584      "cost_per_1m_out": 0.626,
 585      "cost_per_1m_in_cached": 0,
 586      "cost_per_1m_out_cached": 0,
 587      "context_window": 128000,
 588      "default_max_tokens": 12800,
 589      "can_reason": true,
 590      "reasoning_levels": [
 591        "low",
 592        "medium",
 593        "high"
 594      ],
 595      "default_reasoning_effort": "medium",
 596      "supports_attachments": false
 597    },
 598    {
 599      "id": "mistral-large-2512",
 600      "name": "Mistral Large 3 2512",
 601      "cost_per_1m_in": 0.5,
 602      "cost_per_1m_out": 1.5,
 603      "cost_per_1m_in_cached": 0,
 604      "cost_per_1m_out_cached": 0,
 605      "context_window": 256000,
 606      "default_max_tokens": 25600,
 607      "can_reason": false,
 608      "supports_attachments": true
 609    },
 610    {
 611      "id": "ministral-8b-2512",
 612      "name": "Ministral 3 8b 2512",
 613      "cost_per_1m_in": 0.15,
 614      "cost_per_1m_out": 0.15,
 615      "cost_per_1m_in_cached": 0,
 616      "cost_per_1m_out_cached": 0,
 617      "context_window": 256000,
 618      "default_max_tokens": 25600,
 619      "can_reason": false,
 620      "supports_attachments": true
 621    },
 622    {
 623      "id": "ministral-3b-2512",
 624      "name": "Ministral 3 3b 2512",
 625      "cost_per_1m_in": 0.1,
 626      "cost_per_1m_out": 0.1,
 627      "cost_per_1m_in_cached": 0,
 628      "cost_per_1m_out_cached": 0,
 629      "context_window": 256000,
 630      "default_max_tokens": 25600,
 631      "can_reason": false,
 632      "supports_attachments": true
 633    },
 634    {
 635      "id": "ministral-14b-2512",
 636      "name": "Ministral 3 14b 2512",
 637      "cost_per_1m_in": 0.2,
 638      "cost_per_1m_out": 0.2,
 639      "cost_per_1m_in_cached": 0,
 640      "cost_per_1m_out_cached": 0,
 641      "context_window": 256000,
 642      "default_max_tokens": 25600,
 643      "can_reason": false,
 644      "supports_attachments": true
 645    },
 646    {
 647      "id": "intellect-3",
 648      "name": "INTELLECT-3",
 649      "cost_per_1m_in": 0.179,
 650      "cost_per_1m_out": 0.984,
 651      "cost_per_1m_in_cached": 0,
 652      "cost_per_1m_out_cached": 0,
 653      "context_window": 128000,
 654      "default_max_tokens": 12800,
 655      "can_reason": true,
 656      "reasoning_levels": [
 657        "low",
 658        "medium",
 659        "high"
 660      ],
 661      "default_reasoning_effort": "medium",
 662      "supports_attachments": false
 663    },
 664    {
 665      "id": "gpt-5.1",
 666      "name": "GPT 5.1",
 667      "cost_per_1m_in": 1.234,
 668      "cost_per_1m_out": 9.838,
 669      "cost_per_1m_in_cached": 0,
 670      "cost_per_1m_out_cached": 0,
 671      "context_window": 400000,
 672      "default_max_tokens": 40000,
 673      "can_reason": true,
 674      "reasoning_levels": [
 675        "low",
 676        "medium",
 677        "high"
 678      ],
 679      "default_reasoning_effort": "medium",
 680      "supports_attachments": true
 681    },
 682    {
 683      "id": "nemotron-nano-v2-12b",
 684      "name": "Nemotron Nano V2 12b",
 685      "cost_per_1m_in": 0.215,
 686      "cost_per_1m_out": 0.635,
 687      "cost_per_1m_in_cached": 0,
 688      "cost_per_1m_out_cached": 0,
 689      "context_window": 128000,
 690      "default_max_tokens": 12800,
 691      "can_reason": true,
 692      "reasoning_levels": [
 693        "low",
 694        "medium",
 695        "high"
 696      ],
 697      "default_reasoning_effort": "medium",
 698      "supports_attachments": true
 699    },
 700    {
 701      "id": "claude-haiku-4-5",
 702      "name": "Claude Haiku 4.5",
 703      "cost_per_1m_in": 0.894,
 704      "cost_per_1m_out": 4.472,
 705      "cost_per_1m_in_cached": 0,
 706      "cost_per_1m_out_cached": 0,
 707      "context_window": 200000,
 708      "default_max_tokens": 20000,
 709      "can_reason": true,
 710      "reasoning_levels": [
 711        "low",
 712        "medium",
 713        "high"
 714      ],
 715      "default_reasoning_effort": "medium",
 716      "supports_attachments": true
 717    },
 718    {
 719      "id": "claude-4-5-sonnet",
 720      "name": "Claude 4.5 Sonnet",
 721      "cost_per_1m_in": 2.683,
 722      "cost_per_1m_out": 13.416,
 723      "cost_per_1m_in_cached": 0,
 724      "cost_per_1m_out_cached": 0,
 725      "context_window": 200000,
 726      "default_max_tokens": 20000,
 727      "can_reason": true,
 728      "reasoning_levels": [
 729        "low",
 730        "medium",
 731        "high"
 732      ],
 733      "default_reasoning_effort": "medium",
 734      "supports_attachments": true
 735    },
 736    {
 737      "id": "magistral-small-2509",
 738      "name": "Magistral Small 2509",
 739      "cost_per_1m_in": 0.5,
 740      "cost_per_1m_out": 1.5,
 741      "cost_per_1m_in_cached": 0,
 742      "cost_per_1m_out_cached": 0,
 743      "context_window": 128000,
 744      "default_max_tokens": 12800,
 745      "can_reason": true,
 746      "reasoning_levels": [
 747        "low",
 748        "medium",
 749        "high"
 750      ],
 751      "default_reasoning_effort": "medium",
 752      "supports_attachments": true
 753    },
 754    {
 755      "id": "magistral-medium-2509",
 756      "name": "Magistral Medium 2509",
 757      "cost_per_1m_in": 2,
 758      "cost_per_1m_out": 5,
 759      "cost_per_1m_in_cached": 0,
 760      "cost_per_1m_out_cached": 0,
 761      "context_window": 128000,
 762      "default_max_tokens": 12800,
 763      "can_reason": true,
 764      "reasoning_levels": [
 765        "low",
 766        "medium",
 767        "high"
 768      ],
 769      "default_reasoning_effort": "medium",
 770      "supports_attachments": true
 771    },
 772    {
 773      "id": "hermes-4-70b",
 774      "name": "Hermes 4 70B",
 775      "cost_per_1m_in": 0.116,
 776      "cost_per_1m_out": 0.358,
 777      "cost_per_1m_in_cached": 0,
 778      "cost_per_1m_out_cached": 0,
 779      "context_window": 128000,
 780      "default_max_tokens": 12800,
 781      "can_reason": false,
 782      "supports_attachments": false
 783    },
 784    {
 785      "id": "gpt-5",
 786      "name": "GPT 5",
 787      "cost_per_1m_in": 1.234,
 788      "cost_per_1m_out": 9.838,
 789      "cost_per_1m_in_cached": 0,
 790      "cost_per_1m_out_cached": 0,
 791      "context_window": 400000,
 792      "default_max_tokens": 40000,
 793      "can_reason": true,
 794      "reasoning_levels": [
 795        "low",
 796        "medium",
 797        "high"
 798      ],
 799      "default_reasoning_effort": "medium",
 800      "supports_attachments": true
 801    },
 802    {
 803      "id": "gpt-oss-120b",
 804      "name": "GPT Oss 120b",
 805      "cost_per_1m_in": 0.035496,
 806      "cost_per_1m_out": 0.17748,
 807      "cost_per_1m_in_cached": 0,
 808      "cost_per_1m_out_cached": 0,
 809      "context_window": 131000,
 810      "default_max_tokens": 13100,
 811      "can_reason": true,
 812      "reasoning_levels": [
 813        "low",
 814        "medium",
 815        "high"
 816      ],
 817      "default_reasoning_effort": "medium",
 818      "supports_attachments": false
 819    },
 820    {
 821      "id": "qwen3-30b-a3b-instruct-2507",
 822      "name": "Qwen3 30B A3B Instruct 2507",
 823      "cost_per_1m_in": 0.089,
 824      "cost_per_1m_out": 0.268,
 825      "cost_per_1m_in_cached": 0,
 826      "cost_per_1m_out_cached": 0,
 827      "context_window": 262000,
 828      "default_max_tokens": 26200,
 829      "can_reason": true,
 830      "reasoning_levels": [
 831        "low",
 832        "medium",
 833        "high"
 834      ],
 835      "default_reasoning_effort": "medium",
 836      "supports_attachments": false
 837    },
 838    {
 839      "id": "gpt-oss-20b",
 840      "name": "GPT Oss 20b",
 841      "cost_per_1m_in": 0.026622,
 842      "cost_per_1m_out": 0.124236,
 843      "cost_per_1m_in_cached": 0,
 844      "cost_per_1m_out_cached": 0,
 845      "context_window": 131000,
 846      "default_max_tokens": 13100,
 847      "can_reason": true,
 848      "reasoning_levels": [
 849        "low",
 850        "medium",
 851        "high"
 852      ],
 853      "default_reasoning_effort": "medium",
 854      "supports_attachments": false
 855    },
 856    {
 857      "id": "mistral-7b-instruct-v0.3",
 858      "name": "Mistral 7B Instruct v0.3",
 859      "cost_per_1m_in": 0.1,
 860      "cost_per_1m_out": 0.1,
 861      "cost_per_1m_in_cached": 0,
 862      "cost_per_1m_out_cached": 0,
 863      "context_window": 127000,
 864      "default_max_tokens": 12700,
 865      "can_reason": false,
 866      "supports_attachments": false
 867    },
 868    {
 869      "id": "mistral-small-3.2-24b-instruct-2506",
 870      "name": "Mistral Small 3.2 24B Instruct 2506",
 871      "cost_per_1m_in": 0.09,
 872      "cost_per_1m_out": 0.28,
 873      "cost_per_1m_in_cached": 0,
 874      "cost_per_1m_out_cached": 0,
 875      "context_window": 128000,
 876      "default_max_tokens": 12800,
 877      "can_reason": false,
 878      "supports_attachments": true
 879    },
 880    {
 881      "id": "mistral-large-2402",
 882      "name": "Mistral Large 2402",
 883      "cost_per_1m_in": 3.846,
 884      "cost_per_1m_out": 11.627,
 885      "cost_per_1m_in_cached": 0,
 886      "cost_per_1m_out_cached": 0,
 887      "context_window": 32000,
 888      "default_max_tokens": 3200,
 889      "can_reason": true,
 890      "reasoning_levels": [
 891        "low",
 892        "medium",
 893        "high"
 894      ],
 895      "default_reasoning_effort": "medium",
 896      "supports_attachments": false
 897    },
 898    {
 899      "id": "pixtral-large-2502",
 900      "name": "Pixtral Large 25.02",
 901      "cost_per_1m_in": 1.789,
 902      "cost_per_1m_out": 5.366,
 903      "cost_per_1m_in_cached": 0,
 904      "cost_per_1m_out_cached": 0,
 905      "context_window": 128000,
 906      "default_max_tokens": 12800,
 907      "can_reason": true,
 908      "reasoning_levels": [
 909        "low",
 910        "medium",
 911        "high"
 912      ],
 913      "default_reasoning_effort": "medium",
 914      "supports_attachments": true
 915    },
 916    {
 917      "id": "qwen3-235b-a22b-instruct-2507",
 918      "name": "Qwen3 235B A22B Instruct 2507",
 919      "cost_per_1m_in": 0.062118,
 920      "cost_per_1m_out": 0.408204,
 921      "cost_per_1m_in_cached": 0,
 922      "cost_per_1m_out_cached": 0,
 923      "context_window": 131000,
 924      "default_max_tokens": 13100,
 925      "can_reason": true,
 926      "reasoning_levels": [
 927        "low",
 928        "medium",
 929        "high"
 930      ],
 931      "default_reasoning_effort": "medium",
 932      "supports_attachments": false
 933    },
 934    {
 935      "id": "qwen3-coder-30b-a3b-instruct",
 936      "name": "Qwen3 Coder 30b a3b Instruct",
 937      "cost_per_1m_in": 0.053244,
 938      "cost_per_1m_out": 0.22185,
 939      "cost_per_1m_in_cached": 0,
 940      "cost_per_1m_out_cached": 0,
 941      "context_window": 262000,
 942      "default_max_tokens": 26200,
 943      "can_reason": true,
 944      "reasoning_levels": [
 945        "low",
 946        "medium",
 947        "high"
 948      ],
 949      "default_reasoning_effort": "medium",
 950      "supports_attachments": false
 951    },
 952    {
 953      "id": "qwen3-32b",
 954      "name": "Qwen3 32B",
 955      "cost_per_1m_in": 0.089,
 956      "cost_per_1m_out": 0.268,
 957      "cost_per_1m_in_cached": 0,
 958      "cost_per_1m_out_cached": 0,
 959      "context_window": 40000,
 960      "default_max_tokens": 4000,
 961      "can_reason": true,
 962      "reasoning_levels": [
 963        "low",
 964        "medium",
 965        "high"
 966      ],
 967      "default_reasoning_effort": "medium",
 968      "supports_attachments": false
 969    },
 970    {
 971      "id": "nova-lite-v1",
 972      "name": "Nova Lite 1.0",
 973      "cost_per_1m_in": 0.062,
 974      "cost_per_1m_out": 0.247,
 975      "cost_per_1m_in_cached": 0,
 976      "cost_per_1m_out_cached": 0,
 977      "context_window": 300000,
 978      "default_max_tokens": 30000,
 979      "can_reason": true,
 980      "reasoning_levels": [
 981        "low",
 982        "medium",
 983        "high"
 984      ],
 985      "default_reasoning_effort": "medium",
 986      "supports_attachments": true
 987    },
 988    {
 989      "id": "claude-sonnet-4",
 990      "name": "Claude Sonnet 4",
 991      "cost_per_1m_in": 2.601,
 992      "cost_per_1m_out": 13.01,
 993      "cost_per_1m_in_cached": 0,
 994      "cost_per_1m_out_cached": 0,
 995      "context_window": 200000,
 996      "default_max_tokens": 20000,
 997      "can_reason": true,
 998      "reasoning_levels": [
 999        "low",
1000        "medium",
1001        "high"
1002      ],
1003      "default_reasoning_effort": "medium",
1004      "supports_attachments": true
1005    },
1006    {
1007      "id": "gpt-4.1-mini",
1008      "name": "GPT 4.1 mini",
1009      "cost_per_1m_in": 0.39,
1010      "cost_per_1m_out": 1.53,
1011      "cost_per_1m_in_cached": 0,
1012      "cost_per_1m_out_cached": 0,
1013      "context_window": 1047576,
1014      "default_max_tokens": 104757,
1015      "can_reason": true,
1016      "reasoning_levels": [
1017        "low",
1018        "medium",
1019        "high"
1020      ],
1021      "default_reasoning_effort": "medium",
1022      "supports_attachments": true
1023    },
1024    {
1025      "id": "gpt-4.1-nano",
1026      "name": "GPT 4.1 nano",
1027      "cost_per_1m_in": 0.1,
1028      "cost_per_1m_out": 0.39,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0,
1031      "context_window": 1047576,
1032      "default_max_tokens": 104757,
1033      "can_reason": true,
1034      "reasoning_levels": [
1035        "low",
1036        "medium",
1037        "high"
1038      ],
1039      "default_reasoning_effort": "medium",
1040      "supports_attachments": true
1041    },
1042    {
1043      "id": "nova-micro-v1",
1044      "name": "Nova Micro 1.0",
1045      "cost_per_1m_in": 0.036,
1046      "cost_per_1m_out": 0.143,
1047      "cost_per_1m_in_cached": 0,
1048      "cost_per_1m_out_cached": 0,
1049      "context_window": 128000,
1050      "default_max_tokens": 12800,
1051      "can_reason": true,
1052      "reasoning_levels": [
1053        "low",
1054        "medium",
1055        "high"
1056      ],
1057      "default_reasoning_effort": "medium",
1058      "supports_attachments": true
1059    },
1060    {
1061      "id": "gpt-4.1",
1062      "name": "GPT 4.1",
1063      "cost_per_1m_in": 1.968,
1064      "cost_per_1m_out": 7.872,
1065      "cost_per_1m_in_cached": 0,
1066      "cost_per_1m_out_cached": 0,
1067      "context_window": 1047576,
1068      "default_max_tokens": 104757,
1069      "can_reason": true,
1070      "reasoning_levels": [
1071        "low",
1072        "medium",
1073        "high"
1074      ],
1075      "default_reasoning_effort": "medium",
1076      "supports_attachments": true
1077    },
1078    {
1079      "id": "nova-pro-v1",
1080      "name": "Nova Pro 1.0",
1081      "cost_per_1m_in": 0.824,
1082      "cost_per_1m_out": 3.295,
1083      "cost_per_1m_in_cached": 0,
1084      "cost_per_1m_out_cached": 0,
1085      "context_window": 300000,
1086      "default_max_tokens": 30000,
1087      "can_reason": true,
1088      "reasoning_levels": [
1089        "low",
1090        "medium",
1091        "high"
1092      ],
1093      "default_reasoning_effort": "medium",
1094      "supports_attachments": true
1095    },
1096    {
1097      "id": "llama-3.1-nemotron-ultra-253b-v1",
1098      "name": "Llama 3.1 Nemotron Ultra 253B v1",
1099      "cost_per_1m_in": 0.537,
1100      "cost_per_1m_out": 1.61,
1101      "cost_per_1m_in_cached": 0,
1102      "cost_per_1m_out_cached": 0,
1103      "context_window": 128000,
1104      "default_max_tokens": 12800,
1105      "can_reason": true,
1106      "reasoning_levels": [
1107        "low",
1108        "medium",
1109        "high"
1110      ],
1111      "default_reasoning_effort": "medium",
1112      "supports_attachments": false
1113    },
1114    {
1115      "id": "llama-4-maverick",
1116      "name": "Llama 4 Maverick",
1117      "cost_per_1m_in": 0.124236,
1118      "cost_per_1m_out": 0.602832,
1119      "cost_per_1m_in_cached": 0,
1120      "cost_per_1m_out_cached": 0,
1121      "context_window": 1050000,
1122      "default_max_tokens": 105000,
1123      "can_reason": false,
1124      "supports_attachments": false
1125    },
1126    {
1127      "id": "deepseek-v3-0324",
1128      "name": "DeepSeek V3 0324",
1129      "cost_per_1m_in": 0.26622,
1130      "cost_per_1m_out": 0.8874,
1131      "cost_per_1m_in_cached": 0,
1132      "cost_per_1m_out_cached": 0,
1133      "context_window": 163840,
1134      "default_max_tokens": 16384,
1135      "can_reason": true,
1136      "reasoning_levels": [
1137        "low",
1138        "medium",
1139        "high"
1140      ],
1141      "default_reasoning_effort": "medium",
1142      "supports_attachments": false
1143    },
1144    {
1145      "id": "mistral-small-2503",
1146      "name": "Mistral Small 2503",
1147      "cost_per_1m_in": 0.1,
1148      "cost_per_1m_out": 0.3,
1149      "cost_per_1m_in_cached": 0,
1150      "cost_per_1m_out_cached": 0,
1151      "context_window": 128000,
1152      "default_max_tokens": 12800,
1153      "can_reason": false,
1154      "supports_attachments": true
1155    },
1156    {
1157      "id": "mistral-small-2506",
1158      "name": "Mistral Small 2506",
1159      "cost_per_1m_in": 0.1,
1160      "cost_per_1m_out": 0.3,
1161      "cost_per_1m_in_cached": 0,
1162      "cost_per_1m_out_cached": 0,
1163      "context_window": 131072,
1164      "default_max_tokens": 13107,
1165      "can_reason": false,
1166      "supports_attachments": true
1167    },
1168    {
1169      "id": "gemini-2.0-flash-001",
1170      "name": "Gemini 2.0 Flash",
1171      "cost_per_1m_in": 0.13416,
1172      "cost_per_1m_out": 0.53664,
1173      "cost_per_1m_in_cached": 0,
1174      "cost_per_1m_out_cached": 0,
1175      "context_window": 1048576,
1176      "default_max_tokens": 104857,
1177      "can_reason": false,
1178      "supports_attachments": true
1179    },
1180    {
1181      "id": "gemini-2.0-flash-lite-001",
1182      "name": "Gemini 2.0 Flash Lite",
1183      "cost_per_1m_in": 0.06708,
1184      "cost_per_1m_out": 0.26832,
1185      "cost_per_1m_in_cached": 0,
1186      "cost_per_1m_out_cached": 0,
1187      "context_window": 1048576,
1188      "default_max_tokens": 104857,
1189      "can_reason": false,
1190      "supports_attachments": true
1191    },
1192    {
1193      "id": "gemini-2.5-flash",
1194      "name": "Gemini 2.5 Flash",
1195      "cost_per_1m_in": 0.26832,
1196      "cost_per_1m_out": 2.236,
1197      "cost_per_1m_in_cached": 0,
1198      "cost_per_1m_out_cached": 0,
1199      "context_window": 1048576,
1200      "default_max_tokens": 104857,
1201      "can_reason": true,
1202      "reasoning_levels": [
1203        "low",
1204        "medium",
1205        "high"
1206      ],
1207      "default_reasoning_effort": "medium",
1208      "supports_attachments": true
1209    },
1210    {
1211      "id": "gemini-2.5-pro",
1212      "name": "Gemini 2.5 Pro",
1213      "cost_per_1m_in": 1.3416,
1214      "cost_per_1m_out": 8.944,
1215      "cost_per_1m_in_cached": 0,
1216      "cost_per_1m_out_cached": 0,
1217      "context_window": 1048576,
1218      "default_max_tokens": 104857,
1219      "can_reason": true,
1220      "reasoning_levels": [
1221        "low",
1222        "medium",
1223        "high"
1224      ],
1225      "default_reasoning_effort": "medium",
1226      "supports_attachments": true
1227    },
1228    {
1229      "id": "gemma-3-27b-it",
1230      "name": "Gemma 3 27b it",
1231      "cost_per_1m_in": 0.089,
1232      "cost_per_1m_out": 0.268,
1233      "cost_per_1m_in_cached": 0,
1234      "cost_per_1m_out_cached": 0,
1235      "context_window": 131000,
1236      "default_max_tokens": 13100,
1237      "can_reason": true,
1238      "reasoning_levels": [
1239        "low",
1240        "medium",
1241        "high"
1242      ],
1243      "default_reasoning_effort": "medium",
1244      "supports_attachments": true
1245    },
1246    {
1247      "id": "deepseek-r1-0528",
1248      "name": "DeepSeek R1 0528",
1249      "cost_per_1m_in": 0.585084,
1250      "cost_per_1m_out": 2.30724,
1251      "cost_per_1m_in_cached": 0,
1252      "cost_per_1m_out_cached": 0,
1253      "context_window": 164000,
1254      "default_max_tokens": 16400,
1255      "can_reason": true,
1256      "reasoning_levels": [
1257        "low",
1258        "medium",
1259        "high"
1260      ],
1261      "default_reasoning_effort": "medium",
1262      "supports_attachments": false
1263    },
1264    {
1265      "id": "codestral-2508",
1266      "name": "Codestral 25.08",
1267      "cost_per_1m_in": 0.3,
1268      "cost_per_1m_out": 0.9,
1269      "cost_per_1m_in_cached": 0,
1270      "cost_per_1m_out_cached": 0,
1271      "context_window": 256000,
1272      "default_max_tokens": 25600,
1273      "can_reason": false,
1274      "supports_attachments": false
1275    },
1276    {
1277      "id": "llama-3.3-70b-instruct",
1278      "name": "Llama 3.3 70B Instruct",
1279      "cost_per_1m_in": 0.08874,
1280      "cost_per_1m_out": 0.274994,
1281      "cost_per_1m_in_cached": 0,
1282      "cost_per_1m_out_cached": 0,
1283      "context_window": 131000,
1284      "default_max_tokens": 13100,
1285      "can_reason": true,
1286      "reasoning_levels": [
1287        "low",
1288        "medium",
1289        "high"
1290      ],
1291      "default_reasoning_effort": "medium",
1292      "supports_attachments": false
1293    },
1294    {
1295      "id": "gpt-4o",
1296      "name": "GPT 4o",
1297      "cost_per_1m_in": 2.38664,
1298      "cost_per_1m_out": 9.5466,
1299      "cost_per_1m_in_cached": 0,
1300      "cost_per_1m_out_cached": 0,
1301      "context_window": 128000,
1302      "default_max_tokens": 12800,
1303      "can_reason": true,
1304      "reasoning_levels": [
1305        "low",
1306        "medium",
1307        "high"
1308      ],
1309      "default_reasoning_effort": "medium",
1310      "supports_attachments": true
1311    },
1312    {
1313      "id": "gpt-5-mini",
1314      "name": "GPT 5 mini",
1315      "cost_per_1m_in": 0.25,
1316      "cost_per_1m_out": 1.968,
1317      "cost_per_1m_in_cached": 0,
1318      "cost_per_1m_out_cached": 0,
1319      "context_window": 400000,
1320      "default_max_tokens": 40000,
1321      "can_reason": true,
1322      "reasoning_levels": [
1323        "low",
1324        "medium",
1325        "high"
1326      ],
1327      "default_reasoning_effort": "medium",
1328      "supports_attachments": true
1329    },
1330    {
1331      "id": "gpt-5-nano",
1332      "name": "GPT 5 nano",
1333      "cost_per_1m_in": 0.054,
1334      "cost_per_1m_out": 0.394,
1335      "cost_per_1m_in_cached": 0,
1336      "cost_per_1m_out_cached": 0,
1337      "context_window": 400000,
1338      "default_max_tokens": 40000,
1339      "can_reason": true,
1340      "reasoning_levels": [
1341        "low",
1342        "medium",
1343        "high"
1344      ],
1345      "default_reasoning_effort": "medium",
1346      "supports_attachments": true
1347    },
1348    {
1349      "id": "mistral-large-2411",
1350      "name": "Mistral Large 2411",
1351      "cost_per_1m_in": 1.8,
1352      "cost_per_1m_out": 5.4,
1353      "cost_per_1m_in_cached": 0,
1354      "cost_per_1m_out_cached": 0,
1355      "context_window": 131072,
1356      "default_max_tokens": 13107,
1357      "can_reason": true,
1358      "reasoning_levels": [
1359        "low",
1360        "medium",
1361        "high"
1362      ],
1363      "default_reasoning_effort": "medium",
1364      "supports_attachments": false
1365    },
1366    {
1367      "id": "hermes-4-405b",
1368      "name": "Hermes 4 405B",
1369      "cost_per_1m_in": 0.894,
1370      "cost_per_1m_out": 2.683,
1371      "cost_per_1m_in_cached": 0,
1372      "cost_per_1m_out_cached": 0,
1373      "context_window": 128000,
1374      "default_max_tokens": 12800,
1375      "can_reason": false,
1376      "supports_attachments": false
1377    },
1378    {
1379      "id": "mistral-medium-2508",
1380      "name": "Mistral Medium 2508",
1381      "cost_per_1m_in": 0.4,
1382      "cost_per_1m_out": 2,
1383      "cost_per_1m_in_cached": 0,
1384      "cost_per_1m_out_cached": 0,
1385      "context_window": 128000,
1386      "default_max_tokens": 12800,
1387      "can_reason": true,
1388      "reasoning_levels": [
1389        "low",
1390        "medium",
1391        "high"
1392      ],
1393      "default_reasoning_effort": "medium",
1394      "supports_attachments": true
1395    },
1396    {
1397      "id": "devstral-medium-2507",
1398      "name": "Devstral Medium 2507",
1399      "cost_per_1m_in": 0.4,
1400      "cost_per_1m_out": 2,
1401      "cost_per_1m_in_cached": 0,
1402      "cost_per_1m_out_cached": 0,
1403      "context_window": 131072,
1404      "default_max_tokens": 13107,
1405      "can_reason": false,
1406      "supports_attachments": false
1407    },
1408    {
1409      "id": "mistral-nemo-instruct-2407",
1410      "name": "Mistral Nemo 2407",
1411      "cost_per_1m_in": 0.13,
1412      "cost_per_1m_out": 0.13,
1413      "cost_per_1m_in_cached": 0,
1414      "cost_per_1m_out_cached": 0,
1415      "context_window": 131072,
1416      "default_max_tokens": 13107,
1417      "can_reason": false,
1418      "supports_attachments": false
1419    },
1420    {
1421      "id": "devstral-small-2507",
1422      "name": "Devstral Small 2507",
1423      "cost_per_1m_in": 0.1,
1424      "cost_per_1m_out": 0.3,
1425      "cost_per_1m_in_cached": 0,
1426      "cost_per_1m_out_cached": 0,
1427      "context_window": 131072,
1428      "default_max_tokens": 13107,
1429      "can_reason": false,
1430      "supports_attachments": false
1431    },
1432    {
1433      "id": "llama-3.1-405b-instruct",
1434      "name": "Llama 3.1 405B Instruct",
1435      "cost_per_1m_in": 1.75,
1436      "cost_per_1m_out": 1.75,
1437      "cost_per_1m_in_cached": 0,
1438      "cost_per_1m_out_cached": 0,
1439      "context_window": 128000,
1440      "default_max_tokens": 12800,
1441      "can_reason": true,
1442      "reasoning_levels": [
1443        "low",
1444        "medium",
1445        "high"
1446      ],
1447      "default_reasoning_effort": "medium",
1448      "supports_attachments": false
1449    },
1450    {
1451      "id": "gpt-4o-mini",
1452      "name": "GPT 4o mini",
1453      "cost_per_1m_in": 0.1432,
1454      "cost_per_1m_out": 0.5728,
1455      "cost_per_1m_in_cached": 0,
1456      "cost_per_1m_out_cached": 0,
1457      "context_window": 128000,
1458      "default_max_tokens": 12800,
1459      "can_reason": true,
1460      "reasoning_levels": [
1461        "low",
1462        "medium",
1463        "high"
1464      ],
1465      "default_reasoning_effort": "medium",
1466      "supports_attachments": true
1467    },
1468    {
1469      "id": "llama-3.1-8b-instruct",
1470      "name": "Llama 3.1 8B Instruct",
1471      "cost_per_1m_in": 0.018,
1472      "cost_per_1m_out": 0.054,
1473      "cost_per_1m_in_cached": 0,
1474      "cost_per_1m_out_cached": 0,
1475      "context_window": 128000,
1476      "default_max_tokens": 12800,
1477      "can_reason": true,
1478      "reasoning_levels": [
1479        "low",
1480        "medium",
1481        "high"
1482      ],
1483      "default_reasoning_effort": "medium",
1484      "supports_attachments": false
1485    }
1486  ]
1487}