   1{
   2  "name": "Cortecs",
   3  "id": "cortecs",
   4  "api_key": "$CORTECS_API_KEY",
   5  "api_endpoint": "https://api.cortecs.ai/v1",
   6  "type": "openai",
   7  "default_large_model_id": "qwen3-coder-30b-a3b-instruct",
   8  "default_small_model_id": "glm-4.7-flash",
   9  "models": [
  10    {
  11      "id": "mistral-medium-3.5",
  12      "name": "Mistral Medium 3.5",
  13      "cost_per_1m_in": 1.25,
  14      "cost_per_1m_out": 6.4,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 25600,
  19      "can_reason": true,
  20      "reasoning_levels": [
  21        "low",
  22        "medium",
  23        "high"
  24      ],
  25      "default_reasoning_effort": "medium",
  26      "supports_attachments": true
  27    },
  28    {
  29      "id": "nvidia-nemotron-3-nano-omni",
  30      "name": "Nemotron 3 Nano Omni",
  31      "cost_per_1m_in": 0.0532447,
  32      "cost_per_1m_out": 0.212976,
  33      "cost_per_1m_in_cached": 0,
  34      "cost_per_1m_out_cached": 0,
  35      "context_window": 300000,
  36      "default_max_tokens": 30000,
  37      "can_reason": true,
  38      "reasoning_levels": [
  39        "low",
  40        "medium",
  41        "high"
  42      ],
  43      "default_reasoning_effort": "medium",
  44      "supports_attachments": false
  45    },
  46    {
  47      "id": "gpt-5.4",
  48      "name": "GPT 5.4",
  49      "cost_per_1m_in": 2.601,
  50      "cost_per_1m_out": 13.872,
  51      "cost_per_1m_in_cached": 0,
  52      "cost_per_1m_out_cached": 0,
  53      "context_window": 1050000,
  54      "default_max_tokens": 105000,
  55      "can_reason": true,
  56      "reasoning_levels": [
  57        "low",
  58        "medium",
  59        "high"
  60      ],
  61      "default_reasoning_effort": "medium",
  62      "supports_attachments": true
  63    },
  64    {
  65      "id": "kimi-k2.6",
  66      "name": "Kimi K2.6",
  67      "cost_per_1m_in": 0.6936,
  68      "cost_per_1m_out": 3.0345,
  69      "cost_per_1m_in_cached": 0,
  70      "cost_per_1m_out_cached": 0,
  71      "context_window": 256000,
  72      "default_max_tokens": 25600,
  73      "can_reason": true,
  74      "reasoning_levels": [
  75        "low",
  76        "medium",
  77        "high"
  78      ],
  79      "default_reasoning_effort": "medium",
  80      "supports_attachments": true
  81    },
  82    {
  83      "id": "claude-opus4-7",
  84      "name": "Claude Opus 4.7",
  85      "cost_per_1m_in": 4.7685,
  86      "cost_per_1m_out": 23.8425,
  87      "cost_per_1m_in_cached": 0,
  88      "cost_per_1m_out_cached": 0,
  89      "context_window": 1000000,
  90      "default_max_tokens": 100000,
  91      "can_reason": true,
  92      "reasoning_levels": [
  93        "low",
  94        "medium",
  95        "high"
  96      ],
  97      "default_reasoning_effort": "medium",
  98      "supports_attachments": true
  99    },
 100    {
 101      "id": "minimax-m2.7",
 102      "name": "MiniMax M2.7",
 103      "cost_per_1m_in": 0.26622,
 104      "cost_per_1m_out": 1.06488,
 105      "cost_per_1m_in_cached": 0,
 106      "cost_per_1m_out_cached": 0,
 107      "context_window": 196608,
 108      "default_max_tokens": 19660,
 109      "can_reason": true,
 110      "reasoning_levels": [
 111        "low",
 112        "medium",
 113        "high"
 114      ],
 115      "default_reasoning_effort": "medium",
 116      "supports_attachments": false
 117    },
 118    {
 119      "id": "glm-5.1",
 120      "name": "GLM 5.1",
 121      "cost_per_1m_in": 1.24236,
 122      "cost_per_1m_out": 3.90336,
 123      "cost_per_1m_in_cached": 0,
 124      "cost_per_1m_out_cached": 0,
 125      "context_window": 202752,
 126      "default_max_tokens": 20275,
 127      "can_reason": true,
 128      "reasoning_levels": [
 129        "low",
 130        "medium",
 131        "high"
 132      ],
 133      "default_reasoning_effort": "medium",
 134      "supports_attachments": false
 135    },
 136    {
 137      "id": "qwen3.5-122b-a10b",
 138      "name": "Qwen3.5 122B A10B",
 139      "cost_per_1m_in": 0.4437,
 140      "cost_per_1m_out": 3.1059,
 141      "cost_per_1m_in_cached": 0,
 142      "cost_per_1m_out_cached": 0,
 143      "context_window": 262144,
 144      "default_max_tokens": 26214,
 145      "can_reason": true,
 146      "reasoning_levels": [
 147        "low",
 148        "medium",
 149        "high"
 150      ],
 151      "default_reasoning_effort": "medium",
 152      "supports_attachments": false
 153    },
 154    {
 155      "id": "qwen3.5-9b",
 156      "name": "Qwen3.5 9B",
 157      "cost_per_1m_in": 0.1,
 158      "cost_per_1m_out": 0.15,
 159      "cost_per_1m_in_cached": 0,
 160      "cost_per_1m_out_cached": 0,
 161      "context_window": 262000,
 162      "default_max_tokens": 26200,
 163      "can_reason": true,
 164      "reasoning_levels": [
 165        "low",
 166        "medium",
 167        "high"
 168      ],
 169      "default_reasoning_effort": "medium",
 170      "supports_attachments": false
 171    },
 172    {
 173      "id": "nemotron-3-super-120b-a12b",
 174      "name": "Nemotron 3 Super 120B A12B",
 175      "cost_per_1m_in": 0.15606,
 176      "cost_per_1m_out": 0.67626,
 177      "cost_per_1m_in_cached": 0,
 178      "cost_per_1m_out_cached": 0,
 179      "context_window": 262000,
 180      "default_max_tokens": 26214,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "low",
 184        "medium",
 185        "high"
 186      ],
 187      "default_reasoning_effort": "medium",
 188      "supports_attachments": false
 189    },
 190    {
 191      "id": "qwen3-coder-next",
 192      "name": "Qwen3 Coder Next",
 193      "cost_per_1m_in": 0.15,
 194      "cost_per_1m_out": 0.8,
 195      "cost_per_1m_in_cached": 0,
 196      "cost_per_1m_out_cached": 0,
 197      "context_window": 256000,
 198      "default_max_tokens": 25600,
 199      "can_reason": true,
 200      "reasoning_levels": [
 201        "low",
 202        "medium",
 203        "high"
 204      ],
 205      "default_reasoning_effort": "medium",
 206      "supports_attachments": false
 207    },
 208    {
 209      "id": "glm-5",
 210      "name": "GLM 5",
 211      "cost_per_1m_in": 0.8874,
 212      "cost_per_1m_out": 2.83968,
 213      "cost_per_1m_in_cached": 0,
 214      "cost_per_1m_out_cached": 0,
 215      "context_window": 202752,
 216      "default_max_tokens": 20275,
 217      "can_reason": true,
 218      "reasoning_levels": [
 219        "low",
 220        "medium",
 221        "high"
 222      ],
 223      "default_reasoning_effort": "medium",
 224      "supports_attachments": false
 225    },
 226    {
 227      "id": "glm-4.6",
 228      "name": "GLM 4.6",
 229      "cost_per_1m_in": 0.35496,
 230      "cost_per_1m_out": 1.55295,
 231      "cost_per_1m_in_cached": 0,
 232      "cost_per_1m_out_cached": 0,
 233      "context_window": 203000,
 234      "default_max_tokens": 20300,
 235      "can_reason": true,
 236      "reasoning_levels": [
 237        "low",
 238        "medium",
 239        "high"
 240      ],
 241      "default_reasoning_effort": "medium",
 242      "supports_attachments": false
 243    },
 244    {
 245      "id": "deepseek-chat-v3.1",
 246      "name": "DeepSeek Chat V3.1",
 247      "cost_per_1m_in": 0.17748,
 248      "cost_per_1m_out": 0.70992,
 249      "cost_per_1m_in_cached": 0,
 250      "cost_per_1m_out_cached": 0,
 251      "context_window": 164000,
 252      "default_max_tokens": 16400,
 253      "can_reason": true,
 254      "reasoning_levels": [
 255        "low",
 256        "medium",
 257        "high"
 258      ],
 259      "default_reasoning_effort": "medium",
 260      "supports_attachments": false
 261    },
 262    {
 263      "id": "qwen-2.5-72b-instruct",
 264      "name": "Qwen2.5 72B Instruct",
 265      "cost_per_1m_in": 0.062118,
 266      "cost_per_1m_out": 0.230724,
 267      "cost_per_1m_in_cached": 0,
 268      "cost_per_1m_out_cached": 0,
 269      "context_window": 33000,
 270      "default_max_tokens": 3300,
 271      "can_reason": false,
 272      "supports_attachments": false
 273    },
 274    {
 275      "id": "qwen3.5-397b-a17b",
  276      "name": "Qwen3.5 397B A17B",
 277      "cost_per_1m_in": 0.53244,
 278      "cost_per_1m_out": 3.19464,
 279      "cost_per_1m_in_cached": 0,
 280      "cost_per_1m_out_cached": 0,
 281      "context_window": 262000,
 282      "default_max_tokens": 25000,
 283      "can_reason": true,
 284      "reasoning_levels": [
 285        "low",
 286        "medium",
 287        "high"
 288      ],
 289      "default_reasoning_effort": "medium",
 290      "supports_attachments": false
 291    },
 292    {
 293      "id": "deepseek-v3.2",
 294      "name": "DeepSeek V3.2",
 295      "cost_per_1m_in": 0.26622,
 296      "cost_per_1m_out": 0.4437,
 297      "cost_per_1m_in_cached": 0,
 298      "cost_per_1m_out_cached": 0,
 299      "context_window": 163840,
 300      "default_max_tokens": 16384,
 301      "can_reason": true,
 302      "reasoning_levels": [
 303        "low",
 304        "medium",
 305        "high"
 306      ],
 307      "default_reasoning_effort": "medium",
 308      "supports_attachments": false
 309    },
 310    {
 311      "id": "mistral-small-2603",
 312      "name": "Mistral Small 4 2603",
 313      "cost_per_1m_in": 0.1275,
 314      "cost_per_1m_out": 0.51,
 315      "cost_per_1m_in_cached": 0,
 316      "cost_per_1m_out_cached": 0,
 317      "context_window": 256000,
 318      "default_max_tokens": 25600,
 319      "can_reason": true,
 320      "reasoning_levels": [
 321        "low",
 322        "medium",
 323        "high"
 324      ],
 325      "default_reasoning_effort": "medium",
 326      "supports_attachments": true
 327    },
 328    {
 329      "id": "minimax-m2.5",
 330      "name": "MiniMax M2.5",
 331      "cost_per_1m_in": 0.26622,
 332      "cost_per_1m_out": 0.97614,
 333      "cost_per_1m_in_cached": 0,
 334      "cost_per_1m_out_cached": 0,
 335      "context_window": 196608,
 336      "default_max_tokens": 19660,
 337      "can_reason": true,
 338      "reasoning_levels": [
 339        "low",
 340        "medium",
 341        "high"
 342      ],
 343      "default_reasoning_effort": "medium",
 344      "supports_attachments": false
 345    },
 346    {
 347      "id": "claude-4-6-sonnet",
 348      "name": "Claude Sonnet 4.6",
 349      "cost_per_1m_in": 2.8691,
 350      "cost_per_1m_out": 14.3095,
 351      "cost_per_1m_in_cached": 0,
 352      "cost_per_1m_out_cached": 0,
 353      "context_window": 1000000,
 354      "default_max_tokens": 100000,
 355      "can_reason": true,
 356      "reasoning_levels": [
 357        "low",
 358        "medium",
 359        "high"
 360      ],
 361      "default_reasoning_effort": "medium",
 362      "supports_attachments": true
 363    },
 364    {
 365      "id": "glm-4.7-flash",
 366      "name": "GLM 4.7 Flash",
 367      "cost_per_1m_in": 0.0716,
 368      "cost_per_1m_out": 0.4293,
 369      "cost_per_1m_in_cached": 0,
 370      "cost_per_1m_out_cached": 0,
 371      "context_window": 203000,
 372      "default_max_tokens": 20300,
 373      "can_reason": false,
 374      "supports_attachments": false
 375    },
 376    {
 377      "id": "kimi-k2.5",
 378      "name": "Kimi K2.5",
 379      "cost_per_1m_in": 0.4437,
 380      "cost_per_1m_out": 2.12976,
 381      "cost_per_1m_in_cached": 0,
 382      "cost_per_1m_out_cached": 0,
 383      "context_window": 256000,
 384      "default_max_tokens": 25600,
 385      "can_reason": true,
 386      "reasoning_levels": [
 387        "low",
 388        "medium",
 389        "high"
 390      ],
 391      "default_reasoning_effort": "medium",
 392      "supports_attachments": true
 393    },
 394    {
 395      "id": "claude-opus4-6",
 396      "name": "Claude Opus 4.6",
 397      "cost_per_1m_in": 4.7685,
 398      "cost_per_1m_out": 23.8425,
 399      "cost_per_1m_in_cached": 0,
 400      "cost_per_1m_out_cached": 0,
 401      "context_window": 1000000,
 402      "default_max_tokens": 100000,
 403      "can_reason": true,
 404      "reasoning_levels": [
 405        "low",
 406        "medium",
 407        "high"
 408      ],
 409      "default_reasoning_effort": "medium",
 410      "supports_attachments": true
 411    },
 412    {
 413      "id": "minimax-m2",
 414      "name": "MiniMax M2",
 415      "cost_per_1m_in": 0.22185,
 416      "cost_per_1m_out": 0.8874,
 417      "cost_per_1m_in_cached": 0,
 418      "cost_per_1m_out_cached": 0,
 419      "context_window": 196608,
 420      "default_max_tokens": 19660,
 421      "can_reason": true,
 422      "reasoning_levels": [
 423        "low",
 424        "medium",
 425        "high"
 426      ],
 427      "default_reasoning_effort": "medium",
 428      "supports_attachments": false
 429    },
 430    {
 431      "id": "glm-4.7",
 432      "name": "GLM 4.7",
 433      "cost_per_1m_in": 0.53244,
 434      "cost_per_1m_out": 1.95228,
 435      "cost_per_1m_in_cached": 0,
 436      "cost_per_1m_out_cached": 0,
 437      "context_window": 202752,
 438      "default_max_tokens": 20275,
 439      "can_reason": true,
 440      "reasoning_levels": [
 441        "low",
 442        "medium",
 443        "high"
 444      ],
 445      "default_reasoning_effort": "medium",
 446      "supports_attachments": false
 447    },
 448    {
 449      "id": "minimax-m2.1",
 450      "name": "MiniMax M2.1",
 451      "cost_per_1m_in": 0.322,
 452      "cost_per_1m_out": 1.2879,
 453      "cost_per_1m_in_cached": 0,
 454      "cost_per_1m_out_cached": 0,
 455      "context_window": 196000,
 456      "default_max_tokens": 19600,
 457      "can_reason": true,
 458      "reasoning_levels": [
 459        "low",
 460        "medium",
 461        "high"
 462      ],
 463      "default_reasoning_effort": "medium",
 464      "supports_attachments": false
 465    },
 466    {
 467      "id": "qwen3-vl-235b-a22b",
 468      "name": "Qwen3 VL 235B A22B",
 469      "cost_per_1m_in": 0.186354,
 470      "cost_per_1m_out": 1.68606,
 471      "cost_per_1m_in_cached": 0,
 472      "cost_per_1m_out_cached": 0,
 473      "context_window": 131000,
 474      "default_max_tokens": 13100,
 475      "can_reason": true,
 476      "reasoning_levels": [
 477        "low",
 478        "medium",
 479        "high"
 480      ],
 481      "default_reasoning_effort": "medium",
 482      "supports_attachments": true
 483    },
 484    {
 485      "id": "mistral-small-creative",
 486      "name": "Mistral Small Creative",
 487      "cost_per_1m_in": 0.1,
 488      "cost_per_1m_out": 0.3,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 32000,
 492      "default_max_tokens": 3200,
 493      "can_reason": false,
 494      "supports_attachments": false
 495    },
 496    {
 497      "id": "nvidia-nemotron-3-nano-30b-a3b",
 498      "name": "Nemotron 3 Nano 30B A3B",
 499      "cost_per_1m_in": 0.0537,
 500      "cost_per_1m_out": 0.215,
 501      "cost_per_1m_in_cached": 0,
 502      "cost_per_1m_out_cached": 0,
 503      "context_window": 128000,
 504      "default_max_tokens": 12800,
 505      "can_reason": true,
 506      "reasoning_levels": [
 507        "low",
 508        "medium",
 509        "high"
 510      ],
 511      "default_reasoning_effort": "medium",
 512      "supports_attachments": false
 513    },
 514    {
 515      "id": "claude-opus4-5",
 516      "name": "Claude Opus 4.5",
 517      "cost_per_1m_in": 4.7695,
 518      "cost_per_1m_out": 23.8485,
 519      "cost_per_1m_in_cached": 0,
 520      "cost_per_1m_out_cached": 0,
 521      "context_window": 200000,
 522      "default_max_tokens": 20000,
 523      "can_reason": true,
 524      "reasoning_levels": [
 525        "low",
 526        "medium",
 527        "high"
 528      ],
 529      "default_reasoning_effort": "medium",
 530      "supports_attachments": true
 531    },
 532    {
 533      "id": "qwen3-next-80b-a3b-thinking",
 534      "name": "Qwen3 Next 80B A3B Thinking",
 535      "cost_per_1m_in": 0.13311,
 536      "cost_per_1m_out": 1.06488,
 537      "cost_per_1m_in_cached": 0,
 538      "cost_per_1m_out_cached": 0,
 539      "context_window": 262000,
 540      "default_max_tokens": 12800,
 541      "can_reason": true,
 542      "reasoning_levels": [
 543        "low",
 544        "medium",
 545        "high"
 546      ],
 547      "default_reasoning_effort": "medium",
 548      "supports_attachments": false
 549    },
 550    {
 551      "id": "holo2-30b-a3b",
 552      "name": "Holo2 30B A3B",
 553      "cost_per_1m_in": 0.3,
 554      "cost_per_1m_out": 0.7,
 555      "cost_per_1m_in_cached": 0,
 556      "cost_per_1m_out_cached": 0,
 557      "context_window": 22000,
 558      "default_max_tokens": 2200,
 559      "can_reason": true,
 560      "reasoning_levels": [
 561        "low",
 562        "medium",
 563        "high"
 564      ],
 565      "default_reasoning_effort": "medium",
 566      "supports_attachments": true
 567    },
 568    {
 569      "id": "devstral-2512",
 570      "name": "Devstral 2 2512",
 571      "cost_per_1m_in": 0.4,
 572      "cost_per_1m_out": 2,
 573      "cost_per_1m_in_cached": 0,
 574      "cost_per_1m_out_cached": 0,
 575      "context_window": 262000,
 576      "default_max_tokens": 26200,
 577      "can_reason": false,
 578      "supports_attachments": false
 579    },
 580    {
 581      "id": "nova-2-lite",
 582      "name": "Nova 2 Lite",
 583      "cost_per_1m_in": 0.335,
 584      "cost_per_1m_out": 2.822,
 585      "cost_per_1m_in_cached": 0,
 586      "cost_per_1m_out_cached": 0,
 587      "context_window": 1000000,
 588      "default_max_tokens": 100000,
 589      "can_reason": true,
 590      "reasoning_levels": [
 591        "low",
 592        "medium",
 593        "high"
 594      ],
 595      "default_reasoning_effort": "medium",
 596      "supports_attachments": true
 597    },
 598    {
 599      "id": "gpt-oss-safeguard-120b",
 600      "name": "GPT OSS Safeguard 120B",
 601      "cost_per_1m_in": 0.161,
 602      "cost_per_1m_out": 0.626,
 603      "cost_per_1m_in_cached": 0,
 604      "cost_per_1m_out_cached": 0,
 605      "context_window": 128000,
 606      "default_max_tokens": 12800,
 607      "can_reason": true,
 608      "reasoning_levels": [
 609        "low",
 610        "medium",
 611        "high"
 612      ],
 613      "default_reasoning_effort": "medium",
 614      "supports_attachments": false
 615    },
 616    {
 617      "id": "mistral-large-2512",
 618      "name": "Mistral Large 3 2512",
 619      "cost_per_1m_in": 0.5,
 620      "cost_per_1m_out": 1.5,
 621      "cost_per_1m_in_cached": 0,
 622      "cost_per_1m_out_cached": 0,
 623      "context_window": 256000,
 624      "default_max_tokens": 25600,
 625      "can_reason": false,
 626      "supports_attachments": true
 627    },
 628    {
 629      "id": "ministral-8b-2512",
 630      "name": "Ministral 3 8b 2512",
 631      "cost_per_1m_in": 0.15,
 632      "cost_per_1m_out": 0.15,
 633      "cost_per_1m_in_cached": 0,
 634      "cost_per_1m_out_cached": 0,
 635      "context_window": 256000,
 636      "default_max_tokens": 25600,
 637      "can_reason": false,
 638      "supports_attachments": true
 639    },
 640    {
 641      "id": "ministral-3b-2512",
 642      "name": "Ministral 3 3b 2512",
 643      "cost_per_1m_in": 0.1,
 644      "cost_per_1m_out": 0.1,
 645      "cost_per_1m_in_cached": 0,
 646      "cost_per_1m_out_cached": 0,
 647      "context_window": 256000,
 648      "default_max_tokens": 25600,
 649      "can_reason": false,
 650      "supports_attachments": true
 651    },
 652    {
 653      "id": "ministral-14b-2512",
 654      "name": "Ministral 3 14b 2512",
 655      "cost_per_1m_in": 0.2,
 656      "cost_per_1m_out": 0.2,
 657      "cost_per_1m_in_cached": 0,
 658      "cost_per_1m_out_cached": 0,
 659      "context_window": 256000,
 660      "default_max_tokens": 25600,
 661      "can_reason": false,
 662      "supports_attachments": true
 663    },
 664    {
 665      "id": "intellect-3",
 666      "name": "INTELLECT-3",
 667      "cost_per_1m_in": 0.179,
 668      "cost_per_1m_out": 0.984,
 669      "cost_per_1m_in_cached": 0,
 670      "cost_per_1m_out_cached": 0,
 671      "context_window": 128000,
 672      "default_max_tokens": 12800,
 673      "can_reason": true,
 674      "reasoning_levels": [
 675        "low",
 676        "medium",
 677        "high"
 678      ],
 679      "default_reasoning_effort": "medium",
 680      "supports_attachments": false
 681    },
 682    {
 683      "id": "gpt-5.1",
 684      "name": "GPT 5.1",
 685      "cost_per_1m_in": 1.234,
 686      "cost_per_1m_out": 9.838,
 687      "cost_per_1m_in_cached": 0,
 688      "cost_per_1m_out_cached": 0,
 689      "context_window": 400000,
 690      "default_max_tokens": 40000,
 691      "can_reason": true,
 692      "reasoning_levels": [
 693        "low",
 694        "medium",
 695        "high"
 696      ],
 697      "default_reasoning_effort": "medium",
 698      "supports_attachments": true
 699    },
 700    {
 701      "id": "nemotron-nano-v2-12b",
 702      "name": "Nemotron Nano V2 12b",
 703      "cost_per_1m_in": 0.215,
 704      "cost_per_1m_out": 0.635,
 705      "cost_per_1m_in_cached": 0,
 706      "cost_per_1m_out_cached": 0,
 707      "context_window": 128000,
 708      "default_max_tokens": 12800,
 709      "can_reason": true,
 710      "reasoning_levels": [
 711        "low",
 712        "medium",
 713        "high"
 714      ],
 715      "default_reasoning_effort": "medium",
 716      "supports_attachments": true
 717    },
 718    {
 719      "id": "claude-haiku-4-5",
 720      "name": "Claude Haiku 4.5",
 721      "cost_per_1m_in": 0.894,
 722      "cost_per_1m_out": 4.472,
 723      "cost_per_1m_in_cached": 0,
 724      "cost_per_1m_out_cached": 0,
 725      "context_window": 200000,
 726      "default_max_tokens": 20000,
 727      "can_reason": true,
 728      "reasoning_levels": [
 729        "low",
 730        "medium",
 731        "high"
 732      ],
 733      "default_reasoning_effort": "medium",
 734      "supports_attachments": true
 735    },
 736    {
 737      "id": "claude-4-5-sonnet",
 738      "name": "Claude 4.5 Sonnet",
 739      "cost_per_1m_in": 2.683,
 740      "cost_per_1m_out": 13.416,
 741      "cost_per_1m_in_cached": 0,
 742      "cost_per_1m_out_cached": 0,
 743      "context_window": 200000,
 744      "default_max_tokens": 20000,
 745      "can_reason": true,
 746      "reasoning_levels": [
 747        "low",
 748        "medium",
 749        "high"
 750      ],
 751      "default_reasoning_effort": "medium",
 752      "supports_attachments": true
 753    },
 754    {
 755      "id": "magistral-small-2509",
 756      "name": "Magistral Small 2509",
 757      "cost_per_1m_in": 0.5,
 758      "cost_per_1m_out": 1.5,
 759      "cost_per_1m_in_cached": 0,
 760      "cost_per_1m_out_cached": 0,
 761      "context_window": 128000,
 762      "default_max_tokens": 12800,
 763      "can_reason": true,
 764      "reasoning_levels": [
 765        "low",
 766        "medium",
 767        "high"
 768      ],
 769      "default_reasoning_effort": "medium",
 770      "supports_attachments": true
 771    },
 772    {
 773      "id": "magistral-medium-2509",
 774      "name": "Magistral Medium 2509",
 775      "cost_per_1m_in": 2,
 776      "cost_per_1m_out": 5,
 777      "cost_per_1m_in_cached": 0,
 778      "cost_per_1m_out_cached": 0,
 779      "context_window": 128000,
 780      "default_max_tokens": 12800,
 781      "can_reason": true,
 782      "reasoning_levels": [
 783        "low",
 784        "medium",
 785        "high"
 786      ],
 787      "default_reasoning_effort": "medium",
 788      "supports_attachments": true
 789    },
 790    {
 791      "id": "hermes-4-70b",
 792      "name": "Hermes 4 70B",
 793      "cost_per_1m_in": 0.116,
 794      "cost_per_1m_out": 0.358,
 795      "cost_per_1m_in_cached": 0,
 796      "cost_per_1m_out_cached": 0,
 797      "context_window": 128000,
 798      "default_max_tokens": 12800,
 799      "can_reason": false,
 800      "supports_attachments": false
 801    },
 802    {
 803      "id": "gpt-5",
 804      "name": "GPT 5",
 805      "cost_per_1m_in": 1.234,
 806      "cost_per_1m_out": 9.838,
 807      "cost_per_1m_in_cached": 0,
 808      "cost_per_1m_out_cached": 0,
 809      "context_window": 400000,
 810      "default_max_tokens": 40000,
 811      "can_reason": true,
 812      "reasoning_levels": [
 813        "low",
 814        "medium",
 815        "high"
 816      ],
 817      "default_reasoning_effort": "medium",
 818      "supports_attachments": true
 819    },
 820    {
 821      "id": "gpt-oss-120b",
 822      "name": "GPT Oss 120b",
 823      "cost_per_1m_in": 0.035496,
 824      "cost_per_1m_out": 0.17748,
 825      "cost_per_1m_in_cached": 0,
 826      "cost_per_1m_out_cached": 0,
 827      "context_window": 131000,
 828      "default_max_tokens": 13100,
 829      "can_reason": true,
 830      "reasoning_levels": [
 831        "low",
 832        "medium",
 833        "high"
 834      ],
 835      "default_reasoning_effort": "medium",
 836      "supports_attachments": false
 837    },
 838    {
 839      "id": "qwen3-30b-a3b-instruct-2507",
 840      "name": "Qwen3 30B A3B Instruct 2507",
 841      "cost_per_1m_in": 0.089,
 842      "cost_per_1m_out": 0.268,
 843      "cost_per_1m_in_cached": 0,
 844      "cost_per_1m_out_cached": 0,
 845      "context_window": 262000,
 846      "default_max_tokens": 26200,
 847      "can_reason": true,
 848      "reasoning_levels": [
 849        "low",
 850        "medium",
 851        "high"
 852      ],
 853      "default_reasoning_effort": "medium",
 854      "supports_attachments": false
 855    },
 856    {
 857      "id": "gpt-oss-20b",
 858      "name": "GPT Oss 20b",
 859      "cost_per_1m_in": 0.026622,
 860      "cost_per_1m_out": 0.124236,
 861      "cost_per_1m_in_cached": 0,
 862      "cost_per_1m_out_cached": 0,
 863      "context_window": 131000,
 864      "default_max_tokens": 13100,
 865      "can_reason": true,
 866      "reasoning_levels": [
 867        "low",
 868        "medium",
 869        "high"
 870      ],
 871      "default_reasoning_effort": "medium",
 872      "supports_attachments": false
 873    },
 874    {
 875      "id": "mistral-7b-instruct-v0.3",
 876      "name": "Mistral 7B Instruct v0.3",
 877      "cost_per_1m_in": 0.1,
 878      "cost_per_1m_out": 0.1,
 879      "cost_per_1m_in_cached": 0,
 880      "cost_per_1m_out_cached": 0,
 881      "context_window": 127000,
 882      "default_max_tokens": 12700,
 883      "can_reason": false,
 884      "supports_attachments": false
 885    },
 886    {
 887      "id": "mistral-small-3.2-24b-instruct-2506",
 888      "name": "Mistral Small 3.2 24B Instruct 2506",
 889      "cost_per_1m_in": 0.09,
 890      "cost_per_1m_out": 0.28,
 891      "cost_per_1m_in_cached": 0,
 892      "cost_per_1m_out_cached": 0,
 893      "context_window": 128000,
 894      "default_max_tokens": 12800,
 895      "can_reason": false,
 896      "supports_attachments": true
 897    },
 898    {
 899      "id": "mistral-large-2402",
 900      "name": "Mistral Large 2402",
 901      "cost_per_1m_in": 3.846,
 902      "cost_per_1m_out": 11.627,
 903      "cost_per_1m_in_cached": 0,
 904      "cost_per_1m_out_cached": 0,
 905      "context_window": 32000,
 906      "default_max_tokens": 3200,
 907      "can_reason": true,
 908      "reasoning_levels": [
 909        "low",
 910        "medium",
 911        "high"
 912      ],
 913      "default_reasoning_effort": "medium",
 914      "supports_attachments": false
 915    },
 916    {
 917      "id": "pixtral-large-2502",
 918      "name": "Pixtral Large 25.02",
 919      "cost_per_1m_in": 1.789,
 920      "cost_per_1m_out": 5.366,
 921      "cost_per_1m_in_cached": 0,
 922      "cost_per_1m_out_cached": 0,
 923      "context_window": 128000,
 924      "default_max_tokens": 12800,
 925      "can_reason": true,
 926      "reasoning_levels": [
 927        "low",
 928        "medium",
 929        "high"
 930      ],
 931      "default_reasoning_effort": "medium",
 932      "supports_attachments": true
 933    },
 934    {
 935      "id": "qwen3-235b-a22b-instruct-2507",
 936      "name": "Qwen3 235B A22B Instruct 2507",
 937      "cost_per_1m_in": 0.062118,
 938      "cost_per_1m_out": 0.408204,
 939      "cost_per_1m_in_cached": 0,
 940      "cost_per_1m_out_cached": 0,
 941      "context_window": 131000,
 942      "default_max_tokens": 13100,
 943      "can_reason": true,
 944      "reasoning_levels": [
 945        "low",
 946        "medium",
 947        "high"
 948      ],
 949      "default_reasoning_effort": "medium",
 950      "supports_attachments": false
 951    },
 952    {
 953      "id": "qwen3-coder-30b-a3b-instruct",
 954      "name": "Qwen3 Coder 30b a3b Instruct",
 955      "cost_per_1m_in": 0.053244,
 956      "cost_per_1m_out": 0.22185,
 957      "cost_per_1m_in_cached": 0,
 958      "cost_per_1m_out_cached": 0,
 959      "context_window": 262000,
 960      "default_max_tokens": 26200,
 961      "can_reason": true,
 962      "reasoning_levels": [
 963        "low",
 964        "medium",
 965        "high"
 966      ],
 967      "default_reasoning_effort": "medium",
 968      "supports_attachments": false
 969    },
 970    {
 971      "id": "qwen3-32b",
 972      "name": "Qwen3 32B",
 973      "cost_per_1m_in": 0.089,
 974      "cost_per_1m_out": 0.268,
 975      "cost_per_1m_in_cached": 0,
 976      "cost_per_1m_out_cached": 0,
 977      "context_window": 40000,
 978      "default_max_tokens": 4000,
 979      "can_reason": true,
 980      "reasoning_levels": [
 981        "low",
 982        "medium",
 983        "high"
 984      ],
 985      "default_reasoning_effort": "medium",
 986      "supports_attachments": false
 987    },
 988    {
 989      "id": "nova-lite-v1",
 990      "name": "Nova Lite 1.0",
 991      "cost_per_1m_in": 0.062,
 992      "cost_per_1m_out": 0.247,
 993      "cost_per_1m_in_cached": 0,
 994      "cost_per_1m_out_cached": 0,
 995      "context_window": 300000,
 996      "default_max_tokens": 30000,
 997      "can_reason": true,
 998      "reasoning_levels": [
 999        "low",
1000        "medium",
1001        "high"
1002      ],
1003      "default_reasoning_effort": "medium",
1004      "supports_attachments": true
1005    },
1006    {
1007      "id": "claude-sonnet-4",
1008      "name": "Claude Sonnet 4",
1009      "cost_per_1m_in": 2.601,
1010      "cost_per_1m_out": 13.01,
1011      "cost_per_1m_in_cached": 0,
1012      "cost_per_1m_out_cached": 0,
1013      "context_window": 200000,
1014      "default_max_tokens": 20000,
1015      "can_reason": true,
1016      "reasoning_levels": [
1017        "low",
1018        "medium",
1019        "high"
1020      ],
1021      "default_reasoning_effort": "medium",
1022      "supports_attachments": true
1023    },
1024    {
1025      "id": "gpt-4.1-mini",
1026      "name": "GPT 4.1 mini",
1027      "cost_per_1m_in": 0.39,
1028      "cost_per_1m_out": 1.53,
1029      "cost_per_1m_in_cached": 0,
1030      "cost_per_1m_out_cached": 0,
1031      "context_window": 1047576,
1032      "default_max_tokens": 104757,
1033      "can_reason": true,
1034      "reasoning_levels": [
1035        "low",
1036        "medium",
1037        "high"
1038      ],
1039      "default_reasoning_effort": "medium",
1040      "supports_attachments": true
1041    },
1042    {
1043      "id": "gpt-4.1-nano",
1044      "name": "GPT 4.1 nano",
1045      "cost_per_1m_in": 0.1,
1046      "cost_per_1m_out": 0.39,
1047      "cost_per_1m_in_cached": 0,
1048      "cost_per_1m_out_cached": 0,
1049      "context_window": 1047576,
1050      "default_max_tokens": 104757,
1051      "can_reason": true,
1052      "reasoning_levels": [
1053        "low",
1054        "medium",
1055        "high"
1056      ],
1057      "default_reasoning_effort": "medium",
1058      "supports_attachments": true
1059    },
1060    {
1061      "id": "nova-micro-v1",
1062      "name": "Nova Micro 1.0",
1063      "cost_per_1m_in": 0.036,
1064      "cost_per_1m_out": 0.143,
1065      "cost_per_1m_in_cached": 0,
1066      "cost_per_1m_out_cached": 0,
1067      "context_window": 128000,
1068      "default_max_tokens": 12800,
1069      "can_reason": true,
1070      "reasoning_levels": [
1071        "low",
1072        "medium",
1073        "high"
1074      ],
1075      "default_reasoning_effort": "medium",
1076      "supports_attachments": true
1077    },
1078    {
1079      "id": "gpt-4.1",
1080      "name": "GPT 4.1",
1081      "cost_per_1m_in": 1.968,
1082      "cost_per_1m_out": 7.872,
1083      "cost_per_1m_in_cached": 0,
1084      "cost_per_1m_out_cached": 0,
1085      "context_window": 1047576,
1086      "default_max_tokens": 104757,
1087      "can_reason": true,
1088      "reasoning_levels": [
1089        "low",
1090        "medium",
1091        "high"
1092      ],
1093      "default_reasoning_effort": "medium",
1094      "supports_attachments": true
1095    },
1096    {
1097      "id": "nova-pro-v1",
1098      "name": "Nova Pro 1.0",
1099      "cost_per_1m_in": 0.824,
1100      "cost_per_1m_out": 3.295,
1101      "cost_per_1m_in_cached": 0,
1102      "cost_per_1m_out_cached": 0,
1103      "context_window": 300000,
1104      "default_max_tokens": 30000,
1105      "can_reason": true,
1106      "reasoning_levels": [
1107        "low",
1108        "medium",
1109        "high"
1110      ],
1111      "default_reasoning_effort": "medium",
1112      "supports_attachments": true
1113    },
1114    {
1115      "id": "llama-3.1-nemotron-ultra-253b-v1",
1116      "name": "Llama 3.1 Nemotron Ultra 253B v1",
1117      "cost_per_1m_in": 0.537,
1118      "cost_per_1m_out": 1.61,
1119      "cost_per_1m_in_cached": 0,
1120      "cost_per_1m_out_cached": 0,
1121      "context_window": 128000,
1122      "default_max_tokens": 12800,
1123      "can_reason": true,
1124      "reasoning_levels": [
1125        "low",
1126        "medium",
1127        "high"
1128      ],
1129      "default_reasoning_effort": "medium",
1130      "supports_attachments": false
1131    },
1132    {
1133      "id": "llama-4-maverick",
1134      "name": "Llama 4 Maverick",
1135      "cost_per_1m_in": 0.124236,
1136      "cost_per_1m_out": 0.602832,
1137      "cost_per_1m_in_cached": 0,
1138      "cost_per_1m_out_cached": 0,
1139      "context_window": 1050000,
1140      "default_max_tokens": 105000,
1141      "can_reason": false,
1142      "supports_attachments": false
1143    },
1144    {
1145      "id": "deepseek-v3-0324",
1146      "name": "DeepSeek V3 0324",
1147      "cost_per_1m_in": 0.26622,
1148      "cost_per_1m_out": 0.8874,
1149      "cost_per_1m_in_cached": 0,
1150      "cost_per_1m_out_cached": 0,
1151      "context_window": 163840,
1152      "default_max_tokens": 16384,
1153      "can_reason": true,
1154      "reasoning_levels": [
1155        "low",
1156        "medium",
1157        "high"
1158      ],
1159      "default_reasoning_effort": "medium",
1160      "supports_attachments": false
1161    },
1162    {
1163      "id": "mistral-small-2503",
1164      "name": "Mistral Small 2503",
1165      "cost_per_1m_in": 0.1,
1166      "cost_per_1m_out": 0.3,
1167      "cost_per_1m_in_cached": 0,
1168      "cost_per_1m_out_cached": 0,
1169      "context_window": 128000,
1170      "default_max_tokens": 12800,
1171      "can_reason": false,
1172      "supports_attachments": true
1173    },
1174    {
1175      "id": "mistral-small-2506",
1176      "name": "Mistral Small 2506",
1177      "cost_per_1m_in": 0.1,
1178      "cost_per_1m_out": 0.3,
1179      "cost_per_1m_in_cached": 0,
1180      "cost_per_1m_out_cached": 0,
1181      "context_window": 131072,
1182      "default_max_tokens": 13107,
1183      "can_reason": false,
1184      "supports_attachments": true
1185    },
1186    {
1187      "id": "gemini-2.0-flash-001",
1188      "name": "Gemini 2.0 Flash",
1189      "cost_per_1m_in": 0.13416,
1190      "cost_per_1m_out": 0.53664,
1191      "cost_per_1m_in_cached": 0,
1192      "cost_per_1m_out_cached": 0,
1193      "context_window": 1048576,
1194      "default_max_tokens": 104857,
1195      "can_reason": false,
1196      "supports_attachments": true
1197    },
1198    {
1199      "id": "gemini-2.0-flash-lite-001",
1200      "name": "Gemini 2.0 Flash Lite",
1201      "cost_per_1m_in": 0.06708,
1202      "cost_per_1m_out": 0.26832,
1203      "cost_per_1m_in_cached": 0,
1204      "cost_per_1m_out_cached": 0,
1205      "context_window": 1048576,
1206      "default_max_tokens": 104857,
1207      "can_reason": false,
1208      "supports_attachments": true
1209    },
1210    {
1211      "id": "gemini-2.5-flash",
1212      "name": "Gemini 2.5 Flash",
1213      "cost_per_1m_in": 0.26832,
1214      "cost_per_1m_out": 2.236,
1215      "cost_per_1m_in_cached": 0,
1216      "cost_per_1m_out_cached": 0,
1217      "context_window": 1048576,
1218      "default_max_tokens": 104857,
1219      "can_reason": true,
1220      "reasoning_levels": [
1221        "low",
1222        "medium",
1223        "high"
1224      ],
1225      "default_reasoning_effort": "medium",
1226      "supports_attachments": true
1227    },
1228    {
1229      "id": "gemini-2.5-pro",
1230      "name": "Gemini 2.5 Pro",
1231      "cost_per_1m_in": 1.3416,
1232      "cost_per_1m_out": 8.944,
1233      "cost_per_1m_in_cached": 0,
1234      "cost_per_1m_out_cached": 0,
1235      "context_window": 1048576,
1236      "default_max_tokens": 104857,
1237      "can_reason": true,
1238      "reasoning_levels": [
1239        "low",
1240        "medium",
1241        "high"
1242      ],
1243      "default_reasoning_effort": "medium",
1244      "supports_attachments": true
1245    },
1246    {
1247      "id": "gemma-3-27b-it",
1248      "name": "Gemma 3 27b it",
1249      "cost_per_1m_in": 0.089,
1250      "cost_per_1m_out": 0.268,
1251      "cost_per_1m_in_cached": 0,
1252      "cost_per_1m_out_cached": 0,
1253      "context_window": 131000,
1254      "default_max_tokens": 13100,
1255      "can_reason": true,
1256      "reasoning_levels": [
1257        "low",
1258        "medium",
1259        "high"
1260      ],
1261      "default_reasoning_effort": "medium",
1262      "supports_attachments": true
1263    },
1264    {
1265      "id": "deepseek-r1-0528",
1266      "name": "DeepSeek R1 0528",
1267      "cost_per_1m_in": 0.585084,
1268      "cost_per_1m_out": 2.30724,
1269      "cost_per_1m_in_cached": 0,
1270      "cost_per_1m_out_cached": 0,
1271      "context_window": 164000,
1272      "default_max_tokens": 16400,
1273      "can_reason": true,
1274      "reasoning_levels": [
1275        "low",
1276        "medium",
1277        "high"
1278      ],
1279      "default_reasoning_effort": "medium",
1280      "supports_attachments": false
1281    },
1282    {
1283      "id": "codestral-2508",
1284      "name": "Codestral 25.08",
1285      "cost_per_1m_in": 0.3,
1286      "cost_per_1m_out": 0.9,
1287      "cost_per_1m_in_cached": 0,
1288      "cost_per_1m_out_cached": 0,
1289      "context_window": 256000,
1290      "default_max_tokens": 25600,
1291      "can_reason": false,
1292      "supports_attachments": false
1293    },
1294    {
1295      "id": "llama-3.3-70b-instruct",
1296      "name": "Llama 3.3 70B Instruct",
1297      "cost_per_1m_in": 0.08874,
1298      "cost_per_1m_out": 0.274994,
1299      "cost_per_1m_in_cached": 0,
1300      "cost_per_1m_out_cached": 0,
1301      "context_window": 131000,
1302      "default_max_tokens": 13100,
1303      "can_reason": true,
1304      "reasoning_levels": [
1305        "low",
1306        "medium",
1307        "high"
1308      ],
1309      "default_reasoning_effort": "medium",
1310      "supports_attachments": false
1311    },
1312    {
1313      "id": "gpt-4o",
1314      "name": "GPT 4o",
1315      "cost_per_1m_in": 2.38664,
1316      "cost_per_1m_out": 9.5466,
1317      "cost_per_1m_in_cached": 0,
1318      "cost_per_1m_out_cached": 0,
1319      "context_window": 128000,
1320      "default_max_tokens": 12800,
1321      "can_reason": true,
1322      "reasoning_levels": [
1323        "low",
1324        "medium",
1325        "high"
1326      ],
1327      "default_reasoning_effort": "medium",
1328      "supports_attachments": true
1329    },
1330    {
1331      "id": "gpt-5-mini",
1332      "name": "GPT 5 mini",
1333      "cost_per_1m_in": 0.25,
1334      "cost_per_1m_out": 1.968,
1335      "cost_per_1m_in_cached": 0,
1336      "cost_per_1m_out_cached": 0,
1337      "context_window": 400000,
1338      "default_max_tokens": 40000,
1339      "can_reason": true,
1340      "reasoning_levels": [
1341        "low",
1342        "medium",
1343        "high"
1344      ],
1345      "default_reasoning_effort": "medium",
1346      "supports_attachments": true
1347    },
1348    {
1349      "id": "gpt-5-nano",
1350      "name": "GPT 5 nano",
1351      "cost_per_1m_in": 0.054,
1352      "cost_per_1m_out": 0.394,
1353      "cost_per_1m_in_cached": 0,
1354      "cost_per_1m_out_cached": 0,
1355      "context_window": 400000,
1356      "default_max_tokens": 40000,
1357      "can_reason": true,
1358      "reasoning_levels": [
1359        "low",
1360        "medium",
1361        "high"
1362      ],
1363      "default_reasoning_effort": "medium",
1364      "supports_attachments": true
1365    },
1366    {
1367      "id": "mistral-large-2411",
1368      "name": "Mistral Large 2411",
1369      "cost_per_1m_in": 1.8,
1370      "cost_per_1m_out": 5.4,
1371      "cost_per_1m_in_cached": 0,
1372      "cost_per_1m_out_cached": 0,
1373      "context_window": 131072,
1374      "default_max_tokens": 13107,
1375      "can_reason": true,
1376      "reasoning_levels": [
1377        "low",
1378        "medium",
1379        "high"
1380      ],
1381      "default_reasoning_effort": "medium",
1382      "supports_attachments": false
1383    },
1384    {
1385      "id": "hermes-4-405b",
1386      "name": "Hermes 4 405B",
1387      "cost_per_1m_in": 0.894,
1388      "cost_per_1m_out": 2.683,
1389      "cost_per_1m_in_cached": 0,
1390      "cost_per_1m_out_cached": 0,
1391      "context_window": 128000,
1392      "default_max_tokens": 12800,
1393      "can_reason": false,
1394      "supports_attachments": false
1395    },
1396    {
1397      "id": "mistral-medium-2508",
1398      "name": "Mistral Medium 2508",
1399      "cost_per_1m_in": 0.4,
1400      "cost_per_1m_out": 2,
1401      "cost_per_1m_in_cached": 0,
1402      "cost_per_1m_out_cached": 0,
1403      "context_window": 128000,
1404      "default_max_tokens": 12800,
1405      "can_reason": true,
1406      "reasoning_levels": [
1407        "low",
1408        "medium",
1409        "high"
1410      ],
1411      "default_reasoning_effort": "medium",
1412      "supports_attachments": true
1413    },
1414    {
1415      "id": "devstral-medium-2507",
1416      "name": "Devstral Medium 2507",
1417      "cost_per_1m_in": 0.4,
1418      "cost_per_1m_out": 2,
1419      "cost_per_1m_in_cached": 0,
1420      "cost_per_1m_out_cached": 0,
1421      "context_window": 131072,
1422      "default_max_tokens": 13107,
1423      "can_reason": false,
1424      "supports_attachments": false
1425    },
1426    {
1427      "id": "mistral-nemo-instruct-2407",
1428      "name": "Mistral Nemo 2407",
1429      "cost_per_1m_in": 0.13,
1430      "cost_per_1m_out": 0.13,
1431      "cost_per_1m_in_cached": 0,
1432      "cost_per_1m_out_cached": 0,
1433      "context_window": 131072,
1434      "default_max_tokens": 13107,
1435      "can_reason": false,
1436      "supports_attachments": false
1437    },
1438    {
1439      "id": "devstral-small-2507",
1440      "name": "Devstral Small 2507",
1441      "cost_per_1m_in": 0.1,
1442      "cost_per_1m_out": 0.3,
1443      "cost_per_1m_in_cached": 0,
1444      "cost_per_1m_out_cached": 0,
1445      "context_window": 131072,
1446      "default_max_tokens": 13107,
1447      "can_reason": false,
1448      "supports_attachments": false
1449    },
1450    {
1451      "id": "llama-3.1-405b-instruct",
1452      "name": "Llama 3.1 405B Instruct",
1453      "cost_per_1m_in": 1.75,
1454      "cost_per_1m_out": 1.75,
1455      "cost_per_1m_in_cached": 0,
1456      "cost_per_1m_out_cached": 0,
1457      "context_window": 128000,
1458      "default_max_tokens": 12800,
1459      "can_reason": true,
1460      "reasoning_levels": [
1461        "low",
1462        "medium",
1463        "high"
1464      ],
1465      "default_reasoning_effort": "medium",
1466      "supports_attachments": false
1467    },
1468    {
1469      "id": "gpt-4o-mini",
1470      "name": "GPT 4o mini",
1471      "cost_per_1m_in": 0.1432,
1472      "cost_per_1m_out": 0.5728,
1473      "cost_per_1m_in_cached": 0,
1474      "cost_per_1m_out_cached": 0,
1475      "context_window": 128000,
1476      "default_max_tokens": 12800,
1477      "can_reason": true,
1478      "reasoning_levels": [
1479        "low",
1480        "medium",
1481        "high"
1482      ],
1483      "default_reasoning_effort": "medium",
1484      "supports_attachments": true
1485    },
1486    {
1487      "id": "llama-3.1-8b-instruct",
1488      "name": "Llama 3.1 8B Instruct",
1489      "cost_per_1m_in": 0.018,
1490      "cost_per_1m_out": 0.054,
1491      "cost_per_1m_in_cached": 0,
1492      "cost_per_1m_out_cached": 0,
1493      "context_window": 128000,
1494      "default_max_tokens": 12800,
1495      "can_reason": true,
1496      "reasoning_levels": [
1497        "low",
1498        "medium",
1499        "high"
1500      ],
1501      "default_reasoning_effort": "medium",
1502      "supports_attachments": false
1503    }
1504  ]
1505}