cortecs.json

   1{
   2  "name": "Cortecs",
   3  "id": "cortecs",
   4  "api_key": "$CORTECS_API_KEY",
   5  "api_endpoint": "https://api.cortecs.ai/v1",
   6  "type": "openai",
   7  "default_large_model_id": "qwen3-coder-30b-a3b-instruct",
   8  "default_small_model_id": "glm-4.7-flash",
   9  "models": [
  10    {
  11      "id": "gemma-4-26b-a4b-it",
  12      "name": "Gemma 4 26B A4B",
  13      "cost_per_1m_in": 0.25,
  14      "cost_per_1m_out": 0.5,
  15      "cost_per_1m_in_cached": 0,
  16      "cost_per_1m_out_cached": 0,
  17      "context_window": 256000,
  18      "default_max_tokens": 25600,
  19      "can_reason": true,
  20      "reasoning_levels": [
  21        "low",
  22        "medium",
  23        "high"
  24      ],
  25      "default_reasoning_effort": "medium",
  26      "supports_attachments": true
  27    },
  28    {
  29      "id": "deepseek-v4-pro",
  30      "name": "DeepSeek V4 Pro",
  31      "cost_per_1m_in": 1.55295,
  32      "cost_per_1m_out": 3.1059,
  33      "cost_per_1m_in_cached": 0,
  34      "cost_per_1m_out_cached": 0,
  35      "context_window": 1048576,
  36      "default_max_tokens": 104857,
  37      "can_reason": true,
  38      "reasoning_levels": [
  39        "low",
  40        "medium",
  41        "high"
  42      ],
  43      "default_reasoning_effort": "medium",
  44      "supports_attachments": false
  45    },
  46    {
  47      "id": "deepseek-v4-flash",
  48      "name": "DeepSeek V4 Flash",
  49      "cost_per_1m_in": 0.13311,
  50      "cost_per_1m_out": 0.26622,
  51      "cost_per_1m_in_cached": 0,
  52      "cost_per_1m_out_cached": 0,
  53      "context_window": 1048576,
  54      "default_max_tokens": 104857,
  55      "can_reason": true,
  56      "reasoning_levels": [
  57        "low",
  58        "medium",
  59        "high"
  60      ],
  61      "default_reasoning_effort": "medium",
  62      "supports_attachments": false
  63    },
  64    {
  65      "id": "mistral-medium-3.5",
  66      "name": "Mistral Medium 3.5",
  67      "cost_per_1m_in": 1.25,
  68      "cost_per_1m_out": 6.4,
  69      "cost_per_1m_in_cached": 0,
  70      "cost_per_1m_out_cached": 0,
  71      "context_window": 256000,
  72      "default_max_tokens": 25600,
  73      "can_reason": true,
  74      "reasoning_levels": [
  75        "low",
  76        "medium",
  77        "high"
  78      ],
  79      "default_reasoning_effort": "medium",
  80      "supports_attachments": true
  81    },
  82    {
  83      "id": "nvidia-nemotron-3-nano-omni",
  84      "name": "Nemotron 3 Nano Omni",
  85      "cost_per_1m_in": 0.0532447,
  86      "cost_per_1m_out": 0.212976,
  87      "cost_per_1m_in_cached": 0,
  88      "cost_per_1m_out_cached": 0,
  89      "context_window": 300000,
  90      "default_max_tokens": 30000,
  91      "can_reason": true,
  92      "reasoning_levels": [
  93        "low",
  94        "medium",
  95        "high"
  96      ],
  97      "default_reasoning_effort": "medium",
  98      "supports_attachments": false
  99    },
 100    {
 101      "id": "gpt-5.4",
 102      "name": "GPT 5.4",
 103      "cost_per_1m_in": 2.601,
 104      "cost_per_1m_out": 13.872,
 105      "cost_per_1m_in_cached": 0,
 106      "cost_per_1m_out_cached": 0,
 107      "context_window": 1050000,
 108      "default_max_tokens": 105000,
 109      "can_reason": true,
 110      "reasoning_levels": [
 111        "low",
 112        "medium",
 113        "high"
 114      ],
 115      "default_reasoning_effort": "medium",
 116      "supports_attachments": true
 117    },
 118    {
 119      "id": "kimi-k2.6",
 120      "name": "Kimi K2.6",
 121      "cost_per_1m_in": 0.6936,
 122      "cost_per_1m_out": 3.0345,
 123      "cost_per_1m_in_cached": 0,
 124      "cost_per_1m_out_cached": 0,
 125      "context_window": 256000,
 126      "default_max_tokens": 25600,
 127      "can_reason": true,
 128      "reasoning_levels": [
 129        "low",
 130        "medium",
 131        "high"
 132      ],
 133      "default_reasoning_effort": "medium",
 134      "supports_attachments": true
 135    },
 136    {
 137      "id": "claude-opus4-7",
 138      "name": "Claude Opus 4.7",
 139      "cost_per_1m_in": 4.7685,
 140      "cost_per_1m_out": 23.8425,
 141      "cost_per_1m_in_cached": 0,
 142      "cost_per_1m_out_cached": 0,
 143      "context_window": 1000000,
 144      "default_max_tokens": 100000,
 145      "can_reason": true,
 146      "reasoning_levels": [
 147        "low",
 148        "medium",
 149        "high"
 150      ],
 151      "default_reasoning_effort": "medium",
 152      "supports_attachments": true
 153    },
 154    {
 155      "id": "minimax-m2.7",
 156      "name": "MiniMax M2.7",
 157      "cost_per_1m_in": 0.26622,
 158      "cost_per_1m_out": 1.06488,
 159      "cost_per_1m_in_cached": 0,
 160      "cost_per_1m_out_cached": 0,
 161      "context_window": 196608,
 162      "default_max_tokens": 19660,
 163      "can_reason": true,
 164      "reasoning_levels": [
 165        "low",
 166        "medium",
 167        "high"
 168      ],
 169      "default_reasoning_effort": "medium",
 170      "supports_attachments": false
 171    },
 172    {
 173      "id": "glm-5.1",
 174      "name": "GLM 5.1",
 175      "cost_per_1m_in": 1.24236,
 176      "cost_per_1m_out": 3.90336,
 177      "cost_per_1m_in_cached": 0,
 178      "cost_per_1m_out_cached": 0,
 179      "context_window": 202752,
 180      "default_max_tokens": 20275,
 181      "can_reason": true,
 182      "reasoning_levels": [
 183        "low",
 184        "medium",
 185        "high"
 186      ],
 187      "default_reasoning_effort": "medium",
 188      "supports_attachments": false
 189    },
 190    {
 191      "id": "qwen3.5-122b-a10b",
 192      "name": "Qwen3.5 122B A10B",
 193      "cost_per_1m_in": 0.4437,
 194      "cost_per_1m_out": 3.1059,
 195      "cost_per_1m_in_cached": 0,
 196      "cost_per_1m_out_cached": 0,
 197      "context_window": 262144,
 198      "default_max_tokens": 26214,
 199      "can_reason": true,
 200      "reasoning_levels": [
 201        "low",
 202        "medium",
 203        "high"
 204      ],
 205      "default_reasoning_effort": "medium",
 206      "supports_attachments": false
 207    },
 208    {
 209      "id": "qwen3.5-9b",
 210      "name": "Qwen3.5 9B",
 211      "cost_per_1m_in": 0.1,
 212      "cost_per_1m_out": 0.15,
 213      "cost_per_1m_in_cached": 0,
 214      "cost_per_1m_out_cached": 0,
 215      "context_window": 262000,
 216      "default_max_tokens": 26200,
 217      "can_reason": true,
 218      "reasoning_levels": [
 219        "low",
 220        "medium",
 221        "high"
 222      ],
 223      "default_reasoning_effort": "medium",
 224      "supports_attachments": false
 225    },
 226    {
 227      "id": "nemotron-3-super-120b-a12b",
 228      "name": "Nemotron 3 Super 120B A12B",
 229      "cost_per_1m_in": 0.15606,
 230      "cost_per_1m_out": 0.67626,
 231      "cost_per_1m_in_cached": 0,
 232      "cost_per_1m_out_cached": 0,
 233      "context_window": 262000,
  234      "default_max_tokens": 26200,
 235      "can_reason": true,
 236      "reasoning_levels": [
 237        "low",
 238        "medium",
 239        "high"
 240      ],
 241      "default_reasoning_effort": "medium",
 242      "supports_attachments": false
 243    },
 244    {
 245      "id": "qwen3-coder-next",
 246      "name": "Qwen3 Coder Next",
 247      "cost_per_1m_in": 0.15,
 248      "cost_per_1m_out": 0.8,
 249      "cost_per_1m_in_cached": 0,
 250      "cost_per_1m_out_cached": 0,
 251      "context_window": 256000,
 252      "default_max_tokens": 25600,
 253      "can_reason": true,
 254      "reasoning_levels": [
 255        "low",
 256        "medium",
 257        "high"
 258      ],
 259      "default_reasoning_effort": "medium",
 260      "supports_attachments": false
 261    },
 262    {
 263      "id": "glm-5",
 264      "name": "GLM 5",
 265      "cost_per_1m_in": 0.8874,
 266      "cost_per_1m_out": 2.83968,
 267      "cost_per_1m_in_cached": 0,
 268      "cost_per_1m_out_cached": 0,
 269      "context_window": 202752,
 270      "default_max_tokens": 20275,
 271      "can_reason": true,
 272      "reasoning_levels": [
 273        "low",
 274        "medium",
 275        "high"
 276      ],
 277      "default_reasoning_effort": "medium",
 278      "supports_attachments": false
 279    },
 280    {
 281      "id": "glm-4.6",
 282      "name": "GLM 4.6",
 283      "cost_per_1m_in": 0.35496,
 284      "cost_per_1m_out": 1.55295,
 285      "cost_per_1m_in_cached": 0,
 286      "cost_per_1m_out_cached": 0,
 287      "context_window": 203000,
 288      "default_max_tokens": 20300,
 289      "can_reason": true,
 290      "reasoning_levels": [
 291        "low",
 292        "medium",
 293        "high"
 294      ],
 295      "default_reasoning_effort": "medium",
 296      "supports_attachments": false
 297    },
 298    {
 299      "id": "deepseek-chat-v3.1",
 300      "name": "DeepSeek Chat V3.1",
 301      "cost_per_1m_in": 0.17748,
 302      "cost_per_1m_out": 0.70992,
 303      "cost_per_1m_in_cached": 0,
 304      "cost_per_1m_out_cached": 0,
 305      "context_window": 164000,
 306      "default_max_tokens": 16400,
 307      "can_reason": true,
 308      "reasoning_levels": [
 309        "low",
 310        "medium",
 311        "high"
 312      ],
 313      "default_reasoning_effort": "medium",
 314      "supports_attachments": false
 315    },
 316    {
 317      "id": "qwen-2.5-72b-instruct",
 318      "name": "Qwen2.5 72B Instruct",
 319      "cost_per_1m_in": 0.062118,
 320      "cost_per_1m_out": 0.230724,
 321      "cost_per_1m_in_cached": 0,
 322      "cost_per_1m_out_cached": 0,
 323      "context_window": 33000,
 324      "default_max_tokens": 3300,
 325      "can_reason": false,
 326      "supports_attachments": false
 327    },
 328    {
 329      "id": "qwen3.5-397b-a17b",
  330      "name": "Qwen3.5 397B A17B",
 331      "cost_per_1m_in": 0.53244,
 332      "cost_per_1m_out": 3.19464,
 333      "cost_per_1m_in_cached": 0,
 334      "cost_per_1m_out_cached": 0,
 335      "context_window": 262000,
 336      "default_max_tokens": 25000,
 337      "can_reason": true,
 338      "reasoning_levels": [
 339        "low",
 340        "medium",
 341        "high"
 342      ],
 343      "default_reasoning_effort": "medium",
 344      "supports_attachments": false
 345    },
 346    {
 347      "id": "deepseek-v3.2",
 348      "name": "DeepSeek V3.2",
 349      "cost_per_1m_in": 0.26622,
 350      "cost_per_1m_out": 0.4437,
 351      "cost_per_1m_in_cached": 0,
 352      "cost_per_1m_out_cached": 0,
 353      "context_window": 163840,
 354      "default_max_tokens": 16384,
 355      "can_reason": true,
 356      "reasoning_levels": [
 357        "low",
 358        "medium",
 359        "high"
 360      ],
 361      "default_reasoning_effort": "medium",
 362      "supports_attachments": false
 363    },
 364    {
 365      "id": "mistral-small-2603",
 366      "name": "Mistral Small 4 2603",
 367      "cost_per_1m_in": 0.1275,
 368      "cost_per_1m_out": 0.51,
 369      "cost_per_1m_in_cached": 0,
 370      "cost_per_1m_out_cached": 0,
 371      "context_window": 256000,
 372      "default_max_tokens": 25600,
 373      "can_reason": true,
 374      "reasoning_levels": [
 375        "low",
 376        "medium",
 377        "high"
 378      ],
 379      "default_reasoning_effort": "medium",
 380      "supports_attachments": true
 381    },
 382    {
 383      "id": "minimax-m2.5",
 384      "name": "MiniMax M2.5",
 385      "cost_per_1m_in": 0.26622,
 386      "cost_per_1m_out": 0.97614,
 387      "cost_per_1m_in_cached": 0,
 388      "cost_per_1m_out_cached": 0,
 389      "context_window": 196608,
 390      "default_max_tokens": 19660,
 391      "can_reason": true,
 392      "reasoning_levels": [
 393        "low",
 394        "medium",
 395        "high"
 396      ],
 397      "default_reasoning_effort": "medium",
 398      "supports_attachments": false
 399    },
 400    {
 401      "id": "claude-4-6-sonnet",
 402      "name": "Claude Sonnet 4.6",
 403      "cost_per_1m_in": 2.8691,
 404      "cost_per_1m_out": 14.3095,
 405      "cost_per_1m_in_cached": 0,
 406      "cost_per_1m_out_cached": 0,
 407      "context_window": 1000000,
 408      "default_max_tokens": 100000,
 409      "can_reason": true,
 410      "reasoning_levels": [
 411        "low",
 412        "medium",
 413        "high"
 414      ],
 415      "default_reasoning_effort": "medium",
 416      "supports_attachments": true
 417    },
 418    {
 419      "id": "glm-4.7-flash",
 420      "name": "GLM 4.7 Flash",
 421      "cost_per_1m_in": 0.0716,
 422      "cost_per_1m_out": 0.4293,
 423      "cost_per_1m_in_cached": 0,
 424      "cost_per_1m_out_cached": 0,
 425      "context_window": 203000,
 426      "default_max_tokens": 20300,
 427      "can_reason": false,
 428      "supports_attachments": false
 429    },
 430    {
 431      "id": "kimi-k2.5",
 432      "name": "Kimi K2.5",
 433      "cost_per_1m_in": 0.4437,
 434      "cost_per_1m_out": 2.12976,
 435      "cost_per_1m_in_cached": 0,
 436      "cost_per_1m_out_cached": 0,
 437      "context_window": 256000,
  438      "default_max_tokens": 25600,
 439      "can_reason": true,
 440      "reasoning_levels": [
 441        "low",
 442        "medium",
 443        "high"
 444      ],
 445      "default_reasoning_effort": "medium",
 446      "supports_attachments": true
 447    },
 448    {
 449      "id": "claude-opus4-6",
 450      "name": "Claude Opus 4.6",
 451      "cost_per_1m_in": 4.7685,
 452      "cost_per_1m_out": 23.8425,
 453      "cost_per_1m_in_cached": 0,
 454      "cost_per_1m_out_cached": 0,
 455      "context_window": 1000000,
 456      "default_max_tokens": 100000,
 457      "can_reason": true,
 458      "reasoning_levels": [
 459        "low",
 460        "medium",
 461        "high"
 462      ],
 463      "default_reasoning_effort": "medium",
 464      "supports_attachments": true
 465    },
 466    {
 467      "id": "minimax-m2",
 468      "name": "MiniMax M2",
 469      "cost_per_1m_in": 0.22185,
 470      "cost_per_1m_out": 0.8874,
 471      "cost_per_1m_in_cached": 0,
 472      "cost_per_1m_out_cached": 0,
 473      "context_window": 196608,
 474      "default_max_tokens": 19660,
 475      "can_reason": true,
 476      "reasoning_levels": [
 477        "low",
 478        "medium",
 479        "high"
 480      ],
 481      "default_reasoning_effort": "medium",
 482      "supports_attachments": false
 483    },
 484    {
 485      "id": "glm-4.7",
 486      "name": "GLM 4.7",
 487      "cost_per_1m_in": 0.53244,
 488      "cost_per_1m_out": 1.95228,
 489      "cost_per_1m_in_cached": 0,
 490      "cost_per_1m_out_cached": 0,
 491      "context_window": 202752,
 492      "default_max_tokens": 20275,
 493      "can_reason": true,
 494      "reasoning_levels": [
 495        "low",
 496        "medium",
 497        "high"
 498      ],
 499      "default_reasoning_effort": "medium",
 500      "supports_attachments": false
 501    },
 502    {
 503      "id": "minimax-m2.1",
 504      "name": "MiniMax M2.1",
 505      "cost_per_1m_in": 0.322,
 506      "cost_per_1m_out": 1.2879,
 507      "cost_per_1m_in_cached": 0,
 508      "cost_per_1m_out_cached": 0,
 509      "context_window": 196000,
 510      "default_max_tokens": 19600,
 511      "can_reason": true,
 512      "reasoning_levels": [
 513        "low",
 514        "medium",
 515        "high"
 516      ],
 517      "default_reasoning_effort": "medium",
 518      "supports_attachments": false
 519    },
 520    {
 521      "id": "qwen3-vl-235b-a22b",
 522      "name": "Qwen3 VL 235B A22B",
 523      "cost_per_1m_in": 0.186354,
 524      "cost_per_1m_out": 1.68606,
 525      "cost_per_1m_in_cached": 0,
 526      "cost_per_1m_out_cached": 0,
 527      "context_window": 131000,
 528      "default_max_tokens": 13100,
 529      "can_reason": true,
 530      "reasoning_levels": [
 531        "low",
 532        "medium",
 533        "high"
 534      ],
 535      "default_reasoning_effort": "medium",
 536      "supports_attachments": true
 537    },
 538    {
 539      "id": "nvidia-nemotron-3-nano-30b-a3b",
 540      "name": "Nemotron 3 Nano 30B A3B",
 541      "cost_per_1m_in": 0.0537,
 542      "cost_per_1m_out": 0.215,
 543      "cost_per_1m_in_cached": 0,
 544      "cost_per_1m_out_cached": 0,
 545      "context_window": 128000,
 546      "default_max_tokens": 12800,
 547      "can_reason": true,
 548      "reasoning_levels": [
 549        "low",
 550        "medium",
 551        "high"
 552      ],
 553      "default_reasoning_effort": "medium",
 554      "supports_attachments": false
 555    },
 556    {
 557      "id": "claude-opus4-5",
 558      "name": "Claude Opus 4.5",
 559      "cost_per_1m_in": 4.7695,
 560      "cost_per_1m_out": 23.8485,
 561      "cost_per_1m_in_cached": 0,
 562      "cost_per_1m_out_cached": 0,
 563      "context_window": 200000,
 564      "default_max_tokens": 20000,
 565      "can_reason": true,
 566      "reasoning_levels": [
 567        "low",
 568        "medium",
 569        "high"
 570      ],
 571      "default_reasoning_effort": "medium",
 572      "supports_attachments": true
 573    },
 574    {
 575      "id": "qwen3-next-80b-a3b-thinking",
 576      "name": "Qwen3 Next 80B A3B Thinking",
 577      "cost_per_1m_in": 0.13311,
 578      "cost_per_1m_out": 1.06488,
 579      "cost_per_1m_in_cached": 0,
 580      "cost_per_1m_out_cached": 0,
 581      "context_window": 262000,
 582      "default_max_tokens": 12800,
 583      "can_reason": true,
 584      "reasoning_levels": [
 585        "low",
 586        "medium",
 587        "high"
 588      ],
 589      "default_reasoning_effort": "medium",
 590      "supports_attachments": false
 591    },
 592    {
 593      "id": "holo2-30b-a3b",
 594      "name": "Holo2 30B A3B",
 595      "cost_per_1m_in": 0.3,
 596      "cost_per_1m_out": 0.7,
 597      "cost_per_1m_in_cached": 0,
 598      "cost_per_1m_out_cached": 0,
 599      "context_window": 22000,
 600      "default_max_tokens": 2200,
 601      "can_reason": true,
 602      "reasoning_levels": [
 603        "low",
 604        "medium",
 605        "high"
 606      ],
 607      "default_reasoning_effort": "medium",
 608      "supports_attachments": true
 609    },
 610    {
 611      "id": "devstral-2512",
 612      "name": "Devstral 2 2512",
 613      "cost_per_1m_in": 0.4,
 614      "cost_per_1m_out": 2,
 615      "cost_per_1m_in_cached": 0,
 616      "cost_per_1m_out_cached": 0,
 617      "context_window": 262000,
 618      "default_max_tokens": 26200,
 619      "can_reason": false,
 620      "supports_attachments": false
 621    },
 622    {
 623      "id": "nova-2-lite",
 624      "name": "Nova 2 Lite",
 625      "cost_per_1m_in": 0.335,
 626      "cost_per_1m_out": 2.822,
 627      "cost_per_1m_in_cached": 0,
 628      "cost_per_1m_out_cached": 0,
 629      "context_window": 1000000,
 630      "default_max_tokens": 100000,
 631      "can_reason": true,
 632      "reasoning_levels": [
 633        "low",
 634        "medium",
 635        "high"
 636      ],
 637      "default_reasoning_effort": "medium",
 638      "supports_attachments": true
 639    },
 640    {
 641      "id": "gpt-oss-safeguard-120b",
 642      "name": "GPT OSS Safeguard 120B",
 643      "cost_per_1m_in": 0.161,
 644      "cost_per_1m_out": 0.626,
 645      "cost_per_1m_in_cached": 0,
 646      "cost_per_1m_out_cached": 0,
 647      "context_window": 128000,
 648      "default_max_tokens": 12800,
 649      "can_reason": true,
 650      "reasoning_levels": [
 651        "low",
 652        "medium",
 653        "high"
 654      ],
 655      "default_reasoning_effort": "medium",
 656      "supports_attachments": false
 657    },
 658    {
 659      "id": "mistral-large-2512",
 660      "name": "Mistral Large 3 2512",
 661      "cost_per_1m_in": 0.5,
 662      "cost_per_1m_out": 1.5,
 663      "cost_per_1m_in_cached": 0,
 664      "cost_per_1m_out_cached": 0,
 665      "context_window": 256000,
 666      "default_max_tokens": 25600,
 667      "can_reason": false,
 668      "supports_attachments": true
 669    },
 670    {
 671      "id": "ministral-8b-2512",
 672      "name": "Ministral 3 8b 2512",
 673      "cost_per_1m_in": 0.15,
 674      "cost_per_1m_out": 0.15,
 675      "cost_per_1m_in_cached": 0,
 676      "cost_per_1m_out_cached": 0,
 677      "context_window": 256000,
 678      "default_max_tokens": 25600,
 679      "can_reason": false,
 680      "supports_attachments": true
 681    },
 682    {
 683      "id": "ministral-3b-2512",
 684      "name": "Ministral 3 3b 2512",
 685      "cost_per_1m_in": 0.1,
 686      "cost_per_1m_out": 0.1,
 687      "cost_per_1m_in_cached": 0,
 688      "cost_per_1m_out_cached": 0,
 689      "context_window": 256000,
 690      "default_max_tokens": 25600,
 691      "can_reason": false,
 692      "supports_attachments": true
 693    },
 694    {
 695      "id": "ministral-14b-2512",
 696      "name": "Ministral 3 14b 2512",
 697      "cost_per_1m_in": 0.2,
 698      "cost_per_1m_out": 0.2,
 699      "cost_per_1m_in_cached": 0,
 700      "cost_per_1m_out_cached": 0,
 701      "context_window": 256000,
 702      "default_max_tokens": 25600,
 703      "can_reason": false,
 704      "supports_attachments": true
 705    },
 706    {
 707      "id": "intellect-3",
 708      "name": "INTELLECT-3",
 709      "cost_per_1m_in": 0.179,
 710      "cost_per_1m_out": 0.984,
 711      "cost_per_1m_in_cached": 0,
 712      "cost_per_1m_out_cached": 0,
 713      "context_window": 128000,
 714      "default_max_tokens": 12800,
 715      "can_reason": true,
 716      "reasoning_levels": [
 717        "low",
 718        "medium",
 719        "high"
 720      ],
 721      "default_reasoning_effort": "medium",
 722      "supports_attachments": false
 723    },
 724    {
 725      "id": "gpt-5.1",
 726      "name": "GPT 5.1",
 727      "cost_per_1m_in": 1.234,
 728      "cost_per_1m_out": 9.838,
 729      "cost_per_1m_in_cached": 0,
 730      "cost_per_1m_out_cached": 0,
 731      "context_window": 400000,
 732      "default_max_tokens": 40000,
 733      "can_reason": true,
 734      "reasoning_levels": [
 735        "low",
 736        "medium",
 737        "high"
 738      ],
 739      "default_reasoning_effort": "medium",
 740      "supports_attachments": true
 741    },
 742    {
 743      "id": "nemotron-nano-v2-12b",
 744      "name": "Nemotron Nano V2 12b",
 745      "cost_per_1m_in": 0.215,
 746      "cost_per_1m_out": 0.635,
 747      "cost_per_1m_in_cached": 0,
 748      "cost_per_1m_out_cached": 0,
 749      "context_window": 128000,
 750      "default_max_tokens": 12800,
 751      "can_reason": true,
 752      "reasoning_levels": [
 753        "low",
 754        "medium",
 755        "high"
 756      ],
 757      "default_reasoning_effort": "medium",
 758      "supports_attachments": true
 759    },
 760    {
 761      "id": "claude-haiku-4-5",
 762      "name": "Claude Haiku 4.5",
 763      "cost_per_1m_in": 0.894,
 764      "cost_per_1m_out": 4.472,
 765      "cost_per_1m_in_cached": 0,
 766      "cost_per_1m_out_cached": 0,
 767      "context_window": 200000,
 768      "default_max_tokens": 20000,
 769      "can_reason": true,
 770      "reasoning_levels": [
 771        "low",
 772        "medium",
 773        "high"
 774      ],
 775      "default_reasoning_effort": "medium",
 776      "supports_attachments": true
 777    },
 778    {
 779      "id": "claude-4-5-sonnet",
 780      "name": "Claude 4.5 Sonnet",
 781      "cost_per_1m_in": 2.683,
 782      "cost_per_1m_out": 13.416,
 783      "cost_per_1m_in_cached": 0,
 784      "cost_per_1m_out_cached": 0,
 785      "context_window": 200000,
 786      "default_max_tokens": 20000,
 787      "can_reason": true,
 788      "reasoning_levels": [
 789        "low",
 790        "medium",
 791        "high"
 792      ],
 793      "default_reasoning_effort": "medium",
 794      "supports_attachments": true
 795    },
 796    {
 797      "id": "magistral-small-2509",
 798      "name": "Magistral Small 2509",
 799      "cost_per_1m_in": 0.5,
 800      "cost_per_1m_out": 1.5,
 801      "cost_per_1m_in_cached": 0,
 802      "cost_per_1m_out_cached": 0,
 803      "context_window": 128000,
 804      "default_max_tokens": 12800,
 805      "can_reason": true,
 806      "reasoning_levels": [
 807        "low",
 808        "medium",
 809        "high"
 810      ],
 811      "default_reasoning_effort": "medium",
 812      "supports_attachments": true
 813    },
 814    {
 815      "id": "magistral-medium-2509",
 816      "name": "Magistral Medium 2509",
 817      "cost_per_1m_in": 2,
 818      "cost_per_1m_out": 5,
 819      "cost_per_1m_in_cached": 0,
 820      "cost_per_1m_out_cached": 0,
 821      "context_window": 128000,
 822      "default_max_tokens": 12800,
 823      "can_reason": true,
 824      "reasoning_levels": [
 825        "low",
 826        "medium",
 827        "high"
 828      ],
 829      "default_reasoning_effort": "medium",
 830      "supports_attachments": true
 831    },
 832    {
 833      "id": "hermes-4-70b",
 834      "name": "Hermes 4 70B",
 835      "cost_per_1m_in": 0.116,
 836      "cost_per_1m_out": 0.358,
 837      "cost_per_1m_in_cached": 0,
 838      "cost_per_1m_out_cached": 0,
 839      "context_window": 128000,
 840      "default_max_tokens": 12800,
 841      "can_reason": false,
 842      "supports_attachments": false
 843    },
 844    {
 845      "id": "gpt-5",
 846      "name": "GPT 5",
 847      "cost_per_1m_in": 1.234,
 848      "cost_per_1m_out": 9.838,
 849      "cost_per_1m_in_cached": 0,
 850      "cost_per_1m_out_cached": 0,
 851      "context_window": 400000,
 852      "default_max_tokens": 40000,
 853      "can_reason": true,
 854      "reasoning_levels": [
 855        "low",
 856        "medium",
 857        "high"
 858      ],
 859      "default_reasoning_effort": "medium",
 860      "supports_attachments": true
 861    },
 862    {
 863      "id": "gpt-oss-120b",
 864      "name": "GPT Oss 120b",
 865      "cost_per_1m_in": 0.035496,
 866      "cost_per_1m_out": 0.17748,
 867      "cost_per_1m_in_cached": 0,
 868      "cost_per_1m_out_cached": 0,
 869      "context_window": 131000,
 870      "default_max_tokens": 13100,
 871      "can_reason": true,
 872      "reasoning_levels": [
 873        "low",
 874        "medium",
 875        "high"
 876      ],
 877      "default_reasoning_effort": "medium",
 878      "supports_attachments": false
 879    },
 880    {
 881      "id": "qwen3-30b-a3b-instruct-2507",
 882      "name": "Qwen3 30B A3B Instruct 2507",
 883      "cost_per_1m_in": 0.089,
 884      "cost_per_1m_out": 0.268,
 885      "cost_per_1m_in_cached": 0,
 886      "cost_per_1m_out_cached": 0,
 887      "context_window": 262000,
 888      "default_max_tokens": 26200,
 889      "can_reason": true,
 890      "reasoning_levels": [
 891        "low",
 892        "medium",
 893        "high"
 894      ],
 895      "default_reasoning_effort": "medium",
 896      "supports_attachments": false
 897    },
 898    {
 899      "id": "gpt-oss-20b",
 900      "name": "GPT Oss 20b",
 901      "cost_per_1m_in": 0.026622,
 902      "cost_per_1m_out": 0.124236,
 903      "cost_per_1m_in_cached": 0,
 904      "cost_per_1m_out_cached": 0,
 905      "context_window": 131000,
 906      "default_max_tokens": 13100,
 907      "can_reason": true,
 908      "reasoning_levels": [
 909        "low",
 910        "medium",
 911        "high"
 912      ],
 913      "default_reasoning_effort": "medium",
 914      "supports_attachments": false
 915    },
 916    {
 917      "id": "mistral-7b-instruct-v0.3",
 918      "name": "Mistral 7B Instruct v0.3",
 919      "cost_per_1m_in": 0.1,
 920      "cost_per_1m_out": 0.1,
 921      "cost_per_1m_in_cached": 0,
 922      "cost_per_1m_out_cached": 0,
 923      "context_window": 127000,
 924      "default_max_tokens": 12700,
 925      "can_reason": false,
 926      "supports_attachments": false
 927    },
 928    {
 929      "id": "mistral-small-3.2-24b-instruct-2506",
 930      "name": "Mistral Small 3.2 24B Instruct 2506",
 931      "cost_per_1m_in": 0.09,
 932      "cost_per_1m_out": 0.28,
 933      "cost_per_1m_in_cached": 0,
 934      "cost_per_1m_out_cached": 0,
 935      "context_window": 128000,
 936      "default_max_tokens": 12800,
 937      "can_reason": false,
 938      "supports_attachments": true
 939    },
 940    {
 941      "id": "mistral-large-2402",
 942      "name": "Mistral Large 2402",
 943      "cost_per_1m_in": 3.846,
 944      "cost_per_1m_out": 11.627,
 945      "cost_per_1m_in_cached": 0,
 946      "cost_per_1m_out_cached": 0,
 947      "context_window": 32000,
 948      "default_max_tokens": 3200,
 949      "can_reason": true,
 950      "reasoning_levels": [
 951        "low",
 952        "medium",
 953        "high"
 954      ],
 955      "default_reasoning_effort": "medium",
 956      "supports_attachments": false
 957    },
 958    {
 959      "id": "pixtral-large-2502",
 960      "name": "Pixtral Large 25.02",
 961      "cost_per_1m_in": 1.789,
 962      "cost_per_1m_out": 5.366,
 963      "cost_per_1m_in_cached": 0,
 964      "cost_per_1m_out_cached": 0,
 965      "context_window": 128000,
 966      "default_max_tokens": 12800,
 967      "can_reason": true,
 968      "reasoning_levels": [
 969        "low",
 970        "medium",
 971        "high"
 972      ],
 973      "default_reasoning_effort": "medium",
 974      "supports_attachments": true
 975    },
 976    {
 977      "id": "qwen3-235b-a22b-instruct-2507",
 978      "name": "Qwen3 235B A22B Instruct 2507",
 979      "cost_per_1m_in": 0.062118,
 980      "cost_per_1m_out": 0.408204,
 981      "cost_per_1m_in_cached": 0,
 982      "cost_per_1m_out_cached": 0,
 983      "context_window": 131000,
 984      "default_max_tokens": 13100,
 985      "can_reason": true,
 986      "reasoning_levels": [
 987        "low",
 988        "medium",
 989        "high"
 990      ],
 991      "default_reasoning_effort": "medium",
 992      "supports_attachments": false
 993    },
 994    {
 995      "id": "qwen3-coder-30b-a3b-instruct",
 996      "name": "Qwen3 Coder 30b a3b Instruct",
 997      "cost_per_1m_in": 0.053244,
 998      "cost_per_1m_out": 0.22185,
 999      "cost_per_1m_in_cached": 0,
1000      "cost_per_1m_out_cached": 0,
1001      "context_window": 262000,
1002      "default_max_tokens": 26200,
1003      "can_reason": true,
1004      "reasoning_levels": [
1005        "low",
1006        "medium",
1007        "high"
1008      ],
1009      "default_reasoning_effort": "medium",
1010      "supports_attachments": false
1011    },
1012    {
1013      "id": "qwen3-32b",
1014      "name": "Qwen3 32B",
1015      "cost_per_1m_in": 0.089,
1016      "cost_per_1m_out": 0.268,
1017      "cost_per_1m_in_cached": 0,
1018      "cost_per_1m_out_cached": 0,
1019      "context_window": 40000,
1020      "default_max_tokens": 4000,
1021      "can_reason": true,
1022      "reasoning_levels": [
1023        "low",
1024        "medium",
1025        "high"
1026      ],
1027      "default_reasoning_effort": "medium",
1028      "supports_attachments": false
1029    },
1030    {
1031      "id": "nova-lite-v1",
1032      "name": "Nova Lite 1.0",
1033      "cost_per_1m_in": 0.062,
1034      "cost_per_1m_out": 0.247,
1035      "cost_per_1m_in_cached": 0,
1036      "cost_per_1m_out_cached": 0,
1037      "context_window": 300000,
1038      "default_max_tokens": 30000,
1039      "can_reason": true,
1040      "reasoning_levels": [
1041        "low",
1042        "medium",
1043        "high"
1044      ],
1045      "default_reasoning_effort": "medium",
1046      "supports_attachments": true
1047    },
1048    {
1049      "id": "claude-sonnet-4",
1050      "name": "Claude Sonnet 4",
1051      "cost_per_1m_in": 2.601,
1052      "cost_per_1m_out": 13.01,
1053      "cost_per_1m_in_cached": 0,
1054      "cost_per_1m_out_cached": 0,
1055      "context_window": 200000,
1056      "default_max_tokens": 20000,
1057      "can_reason": true,
1058      "reasoning_levels": [
1059        "low",
1060        "medium",
1061        "high"
1062      ],
1063      "default_reasoning_effort": "medium",
1064      "supports_attachments": true
1065    },
1066    {
1067      "id": "gpt-4.1-mini",
1068      "name": "GPT 4.1 mini",
1069      "cost_per_1m_in": 0.39,
1070      "cost_per_1m_out": 1.53,
1071      "cost_per_1m_in_cached": 0,
1072      "cost_per_1m_out_cached": 0,
1073      "context_window": 1047576,
1074      "default_max_tokens": 104757,
1075      "can_reason": true,
1076      "reasoning_levels": [
1077        "low",
1078        "medium",
1079        "high"
1080      ],
1081      "default_reasoning_effort": "medium",
1082      "supports_attachments": true
1083    },
1084    {
1085      "id": "gpt-4.1-nano",
1086      "name": "GPT 4.1 nano",
1087      "cost_per_1m_in": 0.1,
1088      "cost_per_1m_out": 0.39,
1089      "cost_per_1m_in_cached": 0,
1090      "cost_per_1m_out_cached": 0,
1091      "context_window": 1047576,
1092      "default_max_tokens": 104757,
1093      "can_reason": true,
1094      "reasoning_levels": [
1095        "low",
1096        "medium",
1097        "high"
1098      ],
1099      "default_reasoning_effort": "medium",
1100      "supports_attachments": true
1101    },
1102    {
1103      "id": "nova-micro-v1",
1104      "name": "Nova Micro 1.0",
1105      "cost_per_1m_in": 0.036,
1106      "cost_per_1m_out": 0.143,
1107      "cost_per_1m_in_cached": 0,
1108      "cost_per_1m_out_cached": 0,
1109      "context_window": 128000,
1110      "default_max_tokens": 12800,
1111      "can_reason": true,
1112      "reasoning_levels": [
1113        "low",
1114        "medium",
1115        "high"
1116      ],
1117      "default_reasoning_effort": "medium",
1118      "supports_attachments": true
1119    },
1120    {
1121      "id": "gpt-4.1",
1122      "name": "GPT 4.1",
1123      "cost_per_1m_in": 1.968,
1124      "cost_per_1m_out": 7.872,
1125      "cost_per_1m_in_cached": 0,
1126      "cost_per_1m_out_cached": 0,
1127      "context_window": 1047576,
1128      "default_max_tokens": 104757,
1129      "can_reason": true,
1130      "reasoning_levels": [
1131        "low",
1132        "medium",
1133        "high"
1134      ],
1135      "default_reasoning_effort": "medium",
1136      "supports_attachments": true
1137    },
1138    {
1139      "id": "nova-pro-v1",
1140      "name": "Nova Pro 1.0",
1141      "cost_per_1m_in": 0.824,
1142      "cost_per_1m_out": 3.295,
1143      "cost_per_1m_in_cached": 0,
1144      "cost_per_1m_out_cached": 0,
1145      "context_window": 300000,
1146      "default_max_tokens": 30000,
1147      "can_reason": true,
1148      "reasoning_levels": [
1149        "low",
1150        "medium",
1151        "high"
1152      ],
1153      "default_reasoning_effort": "medium",
1154      "supports_attachments": true
1155    },
1156    {
1157      "id": "llama-3.1-nemotron-ultra-253b-v1",
1158      "name": "Llama 3.1 Nemotron Ultra 253B v1",
1159      "cost_per_1m_in": 0.537,
1160      "cost_per_1m_out": 1.61,
1161      "cost_per_1m_in_cached": 0,
1162      "cost_per_1m_out_cached": 0,
1163      "context_window": 128000,
1164      "default_max_tokens": 12800,
1165      "can_reason": true,
1166      "reasoning_levels": [
1167        "low",
1168        "medium",
1169        "high"
1170      ],
1171      "default_reasoning_effort": "medium",
1172      "supports_attachments": false
1173    },
1174    {
1175      "id": "llama-4-maverick",
1176      "name": "Llama 4 Maverick",
1177      "cost_per_1m_in": 0.124236,
1178      "cost_per_1m_out": 0.602832,
1179      "cost_per_1m_in_cached": 0,
1180      "cost_per_1m_out_cached": 0,
1181      "context_window": 1050000,
1182      "default_max_tokens": 105000,
1183      "can_reason": false,
1184      "supports_attachments": false
1185    },
1186    {
1187      "id": "deepseek-v3-0324",
1188      "name": "DeepSeek V3 0324",
1189      "cost_per_1m_in": 0.26622,
1190      "cost_per_1m_out": 0.8874,
1191      "cost_per_1m_in_cached": 0,
1192      "cost_per_1m_out_cached": 0,
1193      "context_window": 163840,
1194      "default_max_tokens": 16384,
1195      "can_reason": true,
1196      "reasoning_levels": [
1197        "low",
1198        "medium",
1199        "high"
1200      ],
1201      "default_reasoning_effort": "medium",
1202      "supports_attachments": false
1203    },
1204    {
1205      "id": "mistral-small-2503",
1206      "name": "Mistral Small 2503",
1207      "cost_per_1m_in": 0.1,
1208      "cost_per_1m_out": 0.3,
1209      "cost_per_1m_in_cached": 0,
1210      "cost_per_1m_out_cached": 0,
1211      "context_window": 128000,
1212      "default_max_tokens": 12800,
1213      "can_reason": false,
1214      "supports_attachments": true
1215    },
1216    {
1217      "id": "mistral-small-2506",
1218      "name": "Mistral Small 2506",
1219      "cost_per_1m_in": 0.1,
1220      "cost_per_1m_out": 0.3,
1221      "cost_per_1m_in_cached": 0,
1222      "cost_per_1m_out_cached": 0,
1223      "context_window": 131072,
1224      "default_max_tokens": 13107,
1225      "can_reason": false,
1226      "supports_attachments": true
1227    },
1228    {
1229      "id": "gemini-2.0-flash-001",
1230      "name": "Gemini 2.0 Flash",
1231      "cost_per_1m_in": 0.13416,
1232      "cost_per_1m_out": 0.53664,
1233      "cost_per_1m_in_cached": 0,
1234      "cost_per_1m_out_cached": 0,
1235      "context_window": 1048576,
1236      "default_max_tokens": 104857,
1237      "can_reason": false,
1238      "supports_attachments": true
1239    },
1240    {
1241      "id": "gemini-2.0-flash-lite-001",
1242      "name": "Gemini 2.0 Flash Lite",
1243      "cost_per_1m_in": 0.06708,
1244      "cost_per_1m_out": 0.26832,
1245      "cost_per_1m_in_cached": 0,
1246      "cost_per_1m_out_cached": 0,
1247      "context_window": 1048576,
1248      "default_max_tokens": 104857,
1249      "can_reason": false,
1250      "supports_attachments": true
1251    },
1252    {
1253      "id": "gemini-2.5-flash",
1254      "name": "Gemini 2.5 Flash",
1255      "cost_per_1m_in": 0.26832,
1256      "cost_per_1m_out": 2.236,
1257      "cost_per_1m_in_cached": 0,
1258      "cost_per_1m_out_cached": 0,
1259      "context_window": 1048576,
1260      "default_max_tokens": 104857,
1261      "can_reason": true,
1262      "reasoning_levels": [
1263        "low",
1264        "medium",
1265        "high"
1266      ],
1267      "default_reasoning_effort": "medium",
1268      "supports_attachments": true
1269    },
1270    {
1271      "id": "gemini-2.5-pro",
1272      "name": "Gemini 2.5 Pro",
1273      "cost_per_1m_in": 1.3416,
1274      "cost_per_1m_out": 8.944,
1275      "cost_per_1m_in_cached": 0,
1276      "cost_per_1m_out_cached": 0,
1277      "context_window": 1048576,
1278      "default_max_tokens": 104857,
1279      "can_reason": true,
1280      "reasoning_levels": [
1281        "low",
1282        "medium",
1283        "high"
1284      ],
1285      "default_reasoning_effort": "medium",
1286      "supports_attachments": true
1287    },
1288    {
1289      "id": "gemma-3-27b-it",
      "name": "Gemma 3 27B IT",
1291      "cost_per_1m_in": 0.089,
1292      "cost_per_1m_out": 0.268,
1293      "cost_per_1m_in_cached": 0,
1294      "cost_per_1m_out_cached": 0,
1295      "context_window": 131000,
1296      "default_max_tokens": 13100,
1297      "can_reason": true,
1298      "reasoning_levels": [
1299        "low",
1300        "medium",
1301        "high"
1302      ],
1303      "default_reasoning_effort": "medium",
1304      "supports_attachments": true
1305    },
1306    {
1307      "id": "deepseek-r1-0528",
1308      "name": "DeepSeek R1 0528",
1309      "cost_per_1m_in": 0.585084,
1310      "cost_per_1m_out": 2.30724,
1311      "cost_per_1m_in_cached": 0,
1312      "cost_per_1m_out_cached": 0,
1313      "context_window": 164000,
1314      "default_max_tokens": 16400,
1315      "can_reason": true,
1316      "reasoning_levels": [
1317        "low",
1318        "medium",
1319        "high"
1320      ],
1321      "default_reasoning_effort": "medium",
1322      "supports_attachments": false
1323    },
1324    {
1325      "id": "codestral-2508",
1326      "name": "Codestral 25.08",
1327      "cost_per_1m_in": 0.3,
1328      "cost_per_1m_out": 0.9,
1329      "cost_per_1m_in_cached": 0,
1330      "cost_per_1m_out_cached": 0,
1331      "context_window": 256000,
1332      "default_max_tokens": 25600,
1333      "can_reason": false,
1334      "supports_attachments": false
1335    },
1336    {
1337      "id": "llama-3.3-70b-instruct",
1338      "name": "Llama 3.3 70B Instruct",
1339      "cost_per_1m_in": 0.08874,
1340      "cost_per_1m_out": 0.274994,
1341      "cost_per_1m_in_cached": 0,
1342      "cost_per_1m_out_cached": 0,
1343      "context_window": 131000,
1344      "default_max_tokens": 13100,
1345      "can_reason": true,
1346      "reasoning_levels": [
1347        "low",
1348        "medium",
1349        "high"
1350      ],
1351      "default_reasoning_effort": "medium",
1352      "supports_attachments": false
1353    },
1354    {
1355      "id": "gpt-4o",
1356      "name": "GPT 4o",
1357      "cost_per_1m_in": 2.38664,
1358      "cost_per_1m_out": 9.5466,
1359      "cost_per_1m_in_cached": 0,
1360      "cost_per_1m_out_cached": 0,
1361      "context_window": 128000,
1362      "default_max_tokens": 12800,
1363      "can_reason": true,
1364      "reasoning_levels": [
1365        "low",
1366        "medium",
1367        "high"
1368      ],
1369      "default_reasoning_effort": "medium",
1370      "supports_attachments": true
1371    },
1372    {
1373      "id": "gpt-5-mini",
1374      "name": "GPT 5 mini",
1375      "cost_per_1m_in": 0.25,
1376      "cost_per_1m_out": 1.968,
1377      "cost_per_1m_in_cached": 0,
1378      "cost_per_1m_out_cached": 0,
1379      "context_window": 400000,
1380      "default_max_tokens": 40000,
1381      "can_reason": true,
1382      "reasoning_levels": [
1383        "low",
1384        "medium",
1385        "high"
1386      ],
1387      "default_reasoning_effort": "medium",
1388      "supports_attachments": true
1389    },
1390    {
1391      "id": "gpt-5-nano",
1392      "name": "GPT 5 nano",
1393      "cost_per_1m_in": 0.054,
1394      "cost_per_1m_out": 0.394,
1395      "cost_per_1m_in_cached": 0,
1396      "cost_per_1m_out_cached": 0,
1397      "context_window": 400000,
1398      "default_max_tokens": 40000,
1399      "can_reason": true,
1400      "reasoning_levels": [
1401        "low",
1402        "medium",
1403        "high"
1404      ],
1405      "default_reasoning_effort": "medium",
1406      "supports_attachments": true
1407    },
1408    {
1409      "id": "mistral-large-2411",
1410      "name": "Mistral Large 2411",
1411      "cost_per_1m_in": 1.8,
1412      "cost_per_1m_out": 5.4,
1413      "cost_per_1m_in_cached": 0,
1414      "cost_per_1m_out_cached": 0,
1415      "context_window": 131072,
1416      "default_max_tokens": 13107,
1417      "can_reason": true,
1418      "reasoning_levels": [
1419        "low",
1420        "medium",
1421        "high"
1422      ],
1423      "default_reasoning_effort": "medium",
1424      "supports_attachments": false
1425    },
1426    {
1427      "id": "hermes-4-405b",
1428      "name": "Hermes 4 405B",
1429      "cost_per_1m_in": 0.894,
1430      "cost_per_1m_out": 2.683,
1431      "cost_per_1m_in_cached": 0,
1432      "cost_per_1m_out_cached": 0,
1433      "context_window": 128000,
1434      "default_max_tokens": 12800,
1435      "can_reason": false,
1436      "supports_attachments": false
1437    },
1438    {
1439      "id": "mistral-medium-2508",
1440      "name": "Mistral Medium 2508",
1441      "cost_per_1m_in": 0.4,
1442      "cost_per_1m_out": 2,
1443      "cost_per_1m_in_cached": 0,
1444      "cost_per_1m_out_cached": 0,
1445      "context_window": 128000,
1446      "default_max_tokens": 12800,
1447      "can_reason": true,
1448      "reasoning_levels": [
1449        "low",
1450        "medium",
1451        "high"
1452      ],
1453      "default_reasoning_effort": "medium",
1454      "supports_attachments": true
1455    },
1456    {
1457      "id": "devstral-medium-2507",
1458      "name": "Devstral Medium 2507",
1459      "cost_per_1m_in": 0.4,
1460      "cost_per_1m_out": 2,
1461      "cost_per_1m_in_cached": 0,
1462      "cost_per_1m_out_cached": 0,
1463      "context_window": 131072,
1464      "default_max_tokens": 13107,
1465      "can_reason": false,
1466      "supports_attachments": false
1467    },
1468    {
1469      "id": "mistral-nemo-instruct-2407",
1470      "name": "Mistral Nemo 2407",
1471      "cost_per_1m_in": 0.13,
1472      "cost_per_1m_out": 0.13,
1473      "cost_per_1m_in_cached": 0,
1474      "cost_per_1m_out_cached": 0,
1475      "context_window": 131072,
1476      "default_max_tokens": 13107,
1477      "can_reason": false,
1478      "supports_attachments": false
1479    },
1480    {
1481      "id": "devstral-small-2507",
1482      "name": "Devstral Small 2507",
1483      "cost_per_1m_in": 0.1,
1484      "cost_per_1m_out": 0.3,
1485      "cost_per_1m_in_cached": 0,
1486      "cost_per_1m_out_cached": 0,
1487      "context_window": 131072,
1488      "default_max_tokens": 13107,
1489      "can_reason": false,
1490      "supports_attachments": false
1491    },
1492    {
1493      "id": "llama-3.1-405b-instruct",
1494      "name": "Llama 3.1 405B Instruct",
1495      "cost_per_1m_in": 1.75,
1496      "cost_per_1m_out": 1.75,
1497      "cost_per_1m_in_cached": 0,
1498      "cost_per_1m_out_cached": 0,
1499      "context_window": 128000,
1500      "default_max_tokens": 12800,
1501      "can_reason": true,
1502      "reasoning_levels": [
1503        "low",
1504        "medium",
1505        "high"
1506      ],
1507      "default_reasoning_effort": "medium",
1508      "supports_attachments": false
1509    },
1510    {
1511      "id": "gpt-4o-mini",
1512      "name": "GPT 4o mini",
1513      "cost_per_1m_in": 0.1432,
1514      "cost_per_1m_out": 0.5728,
1515      "cost_per_1m_in_cached": 0,
1516      "cost_per_1m_out_cached": 0,
1517      "context_window": 128000,
1518      "default_max_tokens": 12800,
1519      "can_reason": true,
1520      "reasoning_levels": [
1521        "low",
1522        "medium",
1523        "high"
1524      ],
1525      "default_reasoning_effort": "medium",
1526      "supports_attachments": true
1527    },
1528    {
1529      "id": "llama-3.1-8b-instruct",
1530      "name": "Llama 3.1 8B Instruct",
1531      "cost_per_1m_in": 0.018,
1532      "cost_per_1m_out": 0.054,
1533      "cost_per_1m_in_cached": 0,
1534      "cost_per_1m_out_cached": 0,
1535      "context_window": 128000,
1536      "default_max_tokens": 12800,
1537      "can_reason": true,
1538      "reasoning_levels": [
1539        "low",
1540        "medium",
1541        "high"
1542      ],
1543      "default_reasoning_effort": "medium",
1544      "supports_attachments": false
1545    }
1546  ]
1547}